1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.125git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but it needs to stay to avoid breaking tools which use the osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E',
199 # os.path.samefile is available only under Unix
200 def os_path_samefile(path1, path2):
202 return os.path.samefile(path1, path2)
204 return os.path.realpath(path1) == os.path.realpath(path2)
207 """represent a file, including its metadata"""
208 def __init__(self, name, md5, size, mtime):
218 """Source service content
221 """creates an empty serviceinfo instance"""
224 def read(self, serviceinfo_node):
225 """read in the source services <services> element passed as
228 if serviceinfo_node == None:
231 services = serviceinfo_node.findall('service')
233 for service in services:
234 name = service.get('name')
236 for param in service.findall('param'):
237 option = param.get('name', None)
239 name += " --" + option + " '" + value + "'"
240 self.commands.append(name)
242 msg = 'invalid service format:\n%s' % ET.tostring(root)
243 raise oscerr.APIError(msg)
245 def execute(self, dir):
248 for call in self.commands:
249 temp_dir = tempfile.mkdtemp()
250 name = call.split(None, 1)[0]
251 if not os.path.exists("/usr/lib/obs/service/"+name):
252 msg = "ERROR: service is not installed!\n"
253 msg += "It can maybe be solved with: zypper in obs-server-" + name
254 raise oscerr.APIError(msg)
255 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
256 ret = subprocess.call(c, shell=True)
258 print "ERROR: service call failed: " + c
260 for file in os.listdir(temp_dir):
261 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
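# Illustrative sketch of the _service file format handled by Serviceinfo
# (the service and parameter names below are placeholders, not a statement
# about which services are installed):
#
#   <services>
#     <service name="my_service">
#       <param name="someoption">somevalue</param>
#     </service>
#   </services>
#
# read() turns each <service> element into a command string such as
#   "my_service --someoption 'somevalue'"
# and execute() then runs /usr/lib/obs/service/my_service with those
# arguments plus --outdir pointing to a temporary directory.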
265 """linkinfo metadata (which is part of the xml representing a directory
268 """creates an empty linkinfo instance"""
278 def read(self, linkinfo_node):
279 """read in the linkinfo metadata from the <linkinfo> element passed as
281 If the passed element is None, the method does nothing.
283 if linkinfo_node == None:
285 self.project = linkinfo_node.get('project')
286 self.package = linkinfo_node.get('package')
287 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
288 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
289 self.srcmd5 = linkinfo_node.get('srcmd5')
290 self.error = linkinfo_node.get('error')
291 self.rev = linkinfo_node.get('rev')
292 self.baserev = linkinfo_node.get('baserev')
295 """returns True if the linkinfo is not empty, otherwise False"""
296 if self.xsrcmd5 or self.lsrcmd5:
300 def isexpanded(self):
301 """returns True if the package is an expanded link"""
302 if self.lsrcmd5 and not self.xsrcmd5:
307 """returns True if the link is in error state (could not be applied)"""
313 """return an informatory string representation"""
314 if self.islink() and not self.isexpanded():
315 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
316 % (self.project, self.package, self.xsrcmd5, self.rev)
317 elif self.islink() and self.isexpanded():
319 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
320 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
322 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
323 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
329 """represent a project directory, holding packages"""
330 def __init__(self, dir, getPackageList=True, progress_obj=None):
333 self.absdir = os.path.abspath(dir)
334 self.progress_obj = progress_obj
336 self.name = store_read_project(self.dir)
337 self.apiurl = store_read_apiurl(self.dir)
340 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
342 self.pacs_available = []
344 if conf.config['do_package_tracking']:
345 self.pac_root = self.read_packages().getroot()
346 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
347 self.pacs_excluded = [ i for i in os.listdir(self.dir)
348 for j in conf.config['exclude_glob']
349 if fnmatch.fnmatch(i, j) ]
350 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
351 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
352 # in the self.pacs_broken list
353 self.pacs_broken = []
354 for p in self.pacs_have:
355 if not os.path.isdir(os.path.join(self.absdir, p)):
356 # all states will be replaced with the '!'-state
357 # (unless it is already marked as deleted ('D'-state))
358 self.pacs_broken.append(p)
360 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
362 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
364 def checkout_missing_pacs(self, expand_link=False):
365 for pac in self.pacs_missing:
367 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
368 # pac is not under version control but a local file/dir exists
369 msg = 'can\'t add package \'%s\': Object already exists' % pac
370 raise oscerr.PackageExists(self.name, pac, msg)
372 print 'checking out new package %s' % pac
373 checkout_package(self.apiurl, self.name, pac, \
374 pathname=getTransActPath(os.path.join(self.dir, pac)), \
375 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
377 def set_state(self, pac, state):
378 node = self.get_package_node(pac)
380 self.new_package_entry(pac, state)
382 node.attrib['state'] = state
384 def get_package_node(self, pac):
385 for node in self.pac_root.findall('package'):
386 if pac == node.get('name'):
390 def del_package_node(self, pac):
391 for node in self.pac_root.findall('package'):
392 if pac == node.get('name'):
393 self.pac_root.remove(node)
395 def get_state(self, pac):
396 node = self.get_package_node(pac)
398 return node.get('state')
402 def new_package_entry(self, name, state):
403 ET.SubElement(self.pac_root, 'package', name=name, state=state)
405 def read_packages(self):
406 packages_file = os.path.join(self.absdir, store, '_packages')
407 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
408 return ET.parse(packages_file)
410 # scan project for existing packages and migrate them
412 for data in os.listdir(self.dir):
413 pac_dir = os.path.join(self.absdir, data)
414 # we cannot use self.pacs_available because we cannot guarantee that the package list
415 # was fetched from the server
416 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
417 and Package(pac_dir).name == data:
418 cur_pacs.append(ET.Element('package', name=data, state=' '))
419 store_write_initial_packages(self.absdir, self.name, cur_pacs)
420 return ET.parse(os.path.join(self.absdir, store, '_packages'))
422 def write_packages(self):
423 # TODO: should we only modify the existing file instead of overwriting?
424 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
426 def addPackage(self, pac):
428 for i in conf.config['exclude_glob']:
429 if fnmatch.fnmatch(pac, i):
430 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
431 raise oscerr.OscIOError(None, msg)
432 state = self.get_state(pac)
433 if state == None or state == 'D':
434 self.new_package_entry(pac, 'A')
435 self.write_packages()
436 # sometimes the new pac doesn't exist in the list because
437 # it would take too much time to update all data structs regularly
438 if pac in self.pacs_unvers:
439 self.pacs_unvers.remove(pac)
441 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
443 def delPackage(self, pac, force = False):
444 state = self.get_state(pac.name)
446 if state == ' ' or state == 'D':
448 for file in pac.filenamelist + pac.filenamelist_unvers:
449 filestate = pac.status(file)
450 if filestate == 'M' or filestate == 'C' or \
451 filestate == 'A' or filestate == '?':
454 del_files.append(file)
455 if can_delete or force:
456 for file in del_files:
457 pac.delete_localfile(file)
458 if pac.status(file) != '?':
459 pac.delete_storefile(file)
460 # this is not really necessary
461 pac.put_on_deletelist(file)
462 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
464 pac.write_deletelist()
465 self.set_state(pac.name, 'D')
466 self.write_packages()
468 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
471 delete_dir(pac.absdir)
472 self.del_package_node(pac.name)
473 self.write_packages()
474 print statfrmt('D', pac.name)
476 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
478 print 'package is not under version control'
480 print 'unsupported state'
482 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
485 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
487 # we need to make sure that the _packages file will be written (even if an exception
490 # update complete project
491 # packages which no longer exist upstream
492 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
494 for pac in upstream_del:
495 p = Package(os.path.join(self.dir, pac))
496 self.delPackage(p, force = True)
497 delete_storedir(p.storedir)
502 self.pac_root.remove(self.get_package_node(p.name))
503 self.pacs_have.remove(pac)
505 for pac in self.pacs_have:
506 state = self.get_state(pac)
507 if pac in self.pacs_broken:
508 if self.get_state(pac) != 'A':
509 checkout_package(self.apiurl, self.name, pac,
510 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
511 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
514 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
516 if expand_link and p.islink() and not p.isexpanded():
519 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
524 rev = p.linkinfo.xsrcmd5
525 print 'Expanding to rev', rev
526 elif unexpand_link and p.islink() and p.isexpanded():
527 rev = p.linkinfo.lsrcmd5
528 print 'Unexpanding to rev', rev
529 elif p.islink() and p.isexpanded():
531 print 'Updating %s' % p.name
532 p.update(rev, service_files)
536 # TODO: Package::update has to be fixed to behave like svn does
537 if pac in self.pacs_broken:
538 checkout_package(self.apiurl, self.name, pac,
539 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
540 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
542 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
543 elif state == 'A' and pac in self.pacs_available:
544 # file/dir called pac already exists and is under version control
545 msg = 'can\'t add package \'%s\': Object already exists' % pac
546 raise oscerr.PackageExists(self.name, pac, msg)
551 print 'unexpected state.. package \'%s\'' % pac
553 self.checkout_missing_pacs(expand_link=not unexpand_link)
555 self.write_packages()
557 def commit(self, pacs = (), msg = '', files = {}):
562 if files.has_key(pac):
564 state = self.get_state(pac)
566 self.commitNewPackage(pac, msg, todo)
568 self.commitDelPackage(pac)
570 # display the correct dir when sending the changes
571 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
574 p = Package(os.path.join(self.dir, pac))
577 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
578 print 'osc: \'%s\' is not under version control' % pac
579 elif pac in self.pacs_broken:
580 print 'osc: \'%s\' package not found' % pac
582 self.commitExtPackage(pac, msg, todo)
584 self.write_packages()
586 # if we have packages marked as '!' we cannot commit
587 for pac in self.pacs_broken:
588 if self.get_state(pac) != 'D':
589 msg = 'commit failed: package \'%s\' is missing' % pac
590 raise oscerr.PackageMissing(self.name, pac, msg)
592 for pac in self.pacs_have:
593 state = self.get_state(pac)
596 Package(os.path.join(self.dir, pac)).commit(msg)
598 self.commitDelPackage(pac)
600 self.commitNewPackage(pac, msg)
602 self.write_packages()
604 def commitNewPackage(self, pac, msg = '', files = []):
605 """creates and commits a new package if it does not exist on the server"""
606 if pac in self.pacs_available:
607 print 'package \'%s\' already exists' % pac
609 user = conf.get_apiurl_usr(self.apiurl)
610 edit_meta(metatype='pkg',
611 path_args=(quote_plus(self.name), quote_plus(pac)),
616 # display the correct dir when sending the changes
618 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
622 p = Package(os.path.join(self.dir, pac))
624 print statfrmt('Sending', os.path.normpath(p.dir))
626 self.set_state(pac, ' ')
629 def commitDelPackage(self, pac):
630 """deletes a package on the server and in the working copy"""
632 # display the correct dir when sending the changes
633 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
636 pac_dir = os.path.join(self.dir, pac)
637 p = Package(os.path.join(self.dir, pac))
638 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
639 delete_storedir(p.storedir)
645 pac_dir = os.path.join(self.dir, pac)
646 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
647 print statfrmt('Deleting', getTransActPath(pac_dir))
648 delete_package(self.apiurl, self.name, pac)
649 self.del_package_node(pac)
651 def commitExtPackage(self, pac, msg, files = []):
652 """commits a package from an external project"""
653 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
656 pac_path = os.path.join(self.dir, pac)
658 project = store_read_project(pac_path)
659 package = store_read_package(pac_path)
660 apiurl = store_read_apiurl(pac_path)
661 if meta_exists(metatype='pkg',
662 path_args=(quote_plus(project), quote_plus(package)),
664 create_new=False, apiurl=apiurl):
665 p = Package(pac_path)
669 user = conf.get_apiurl_usr(self.apiurl)
670 edit_meta(metatype='pkg',
671 path_args=(quote_plus(project), quote_plus(package)),
676 p = Package(pac_path)
682 r.append('*****************************************************')
683 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
684 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
685 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
686 r.append('*****************************************************')
692 """represent a package (its directory) and read/keep/write its metadata"""
693 def __init__(self, workingdir, progress_obj=None):
694 self.dir = workingdir
695 self.absdir = os.path.abspath(self.dir)
696 self.storedir = os.path.join(self.absdir, store)
697 self.progress_obj = progress_obj
699 check_store_version(self.dir)
701 self.prjname = store_read_project(self.dir)
702 self.name = store_read_package(self.dir)
703 self.apiurl = store_read_apiurl(self.dir)
705 self.update_datastructs()
709 self.todo_delete = []
712 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
713 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
716 def addfile(self, n):
717 st = os.stat(os.path.join(self.dir, n))
718 f = File(n, None, st.st_size, st.st_mtime)
719 self.filelist.append(f)
720 self.filenamelist.append(n)
721 self.filenamelist_unvers.remove(n)
722 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
724 def delete_file(self, n, force=False):
725 """deletes a file if possible and marks the file as deleted"""
726 state = self.status(n)
727 if state in ['?', 'A', 'M'] and not force:
728 return (False, state)
729 self.delete_localfile(n)
731 self.put_on_deletelist(n)
732 self.write_deletelist()
734 self.delete_storefile(n)
737 def delete_storefile(self, n):
738 try: os.unlink(os.path.join(self.storedir, n))
741 def delete_localfile(self, n):
742 try: os.unlink(os.path.join(self.dir, n))
745 def put_on_deletelist(self, n):
746 if n not in self.to_be_deleted:
747 self.to_be_deleted.append(n)
749 def put_on_conflictlist(self, n):
750 if n not in self.in_conflict:
751 self.in_conflict.append(n)
753 def clear_from_conflictlist(self, n):
754 """delete an entry from the file, and remove the file if it would be empty"""
755 if n in self.in_conflict:
757 filename = os.path.join(self.dir, n)
758 storefilename = os.path.join(self.storedir, n)
759 myfilename = os.path.join(self.dir, n + '.mine')
760 if self.islinkrepair() or self.ispulled():
761 upfilename = os.path.join(self.dir, n + '.new')
763 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
766 os.unlink(myfilename)
767 # the working copy may be updated, so the .r* ending may be obsolete...
769 os.unlink(upfilename)
770 if self.islinkrepair() or self.ispulled():
771 os.unlink(os.path.join(self.dir, n + '.old'))
775 self.in_conflict.remove(n)
777 self.write_conflictlist()
779 def write_deletelist(self):
780 if len(self.to_be_deleted) == 0:
782 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
786 fname = os.path.join(self.storedir, '_to_be_deleted')
788 f.write('\n'.join(self.to_be_deleted))
792 def delete_source_file(self, n):
793 """delete local a source file"""
794 self.delete_localfile(n)
795 self.delete_storefile(n)
797 def delete_remote_source_file(self, n):
798 """delete a remote source file (e.g. from the server)"""
800 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
803 def put_source_file(self, n):
805 # escaping '+' in the URL path (note: not in the URL query string) is
806 # only a workaround for ruby on rails, which swallows it otherwise
808 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
809 http_PUT(u, file = os.path.join(self.dir, n))
811 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
813 def commit(self, msg=''):
814 # commit only if the upstream revision is the same as the working copy's
815 upstream_rev = self.latest_rev()
816 if self.rev != upstream_rev:
817 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
820 self.todo = self.filenamelist_unvers + self.filenamelist
822 pathn = getTransActPath(self.dir)
824 have_conflicts = False
825 for filename in self.todo:
826 if not filename.startswith('_service:') and not filename.startswith('_service_'):
827 st = self.status(filename)
828 if st == 'A' or st == 'M':
829 self.todo_send.append(filename)
830 print statfrmt('Sending', os.path.join(pathn, filename))
832 self.todo_delete.append(filename)
833 print statfrmt('Deleting', os.path.join(pathn, filename))
835 have_conflicts = True
838 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
841 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
842 print 'nothing to do for package %s' % self.name
845 if self.islink() and self.isexpanded():
846 # resolve the link into the upload revision
847 # XXX: do this always?
848 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
849 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
852 print 'Transmitting file data ',
854 for filename in self.todo_delete:
855 # do not touch local files on commit --
856 # delete remotely instead
857 self.delete_remote_source_file(filename)
858 self.to_be_deleted.remove(filename)
859 for filename in self.todo_send:
860 sys.stdout.write('.')
862 self.put_source_file(filename)
864 # all source files are committed - now comes the log
865 query = { 'cmd' : 'commit',
867 'user' : conf.get_apiurl_usr(self.apiurl),
869 if self.islink() and self.isexpanded():
870 query['keeplink'] = '1'
871 if conf.config['linkcontrol'] or self.isfrozen():
872 query['linkrev'] = self.linkinfo.srcmd5
874 query['repairlink'] = '1'
875 query['linkrev'] = self.get_pulled_srcmd5()
876 if self.islinkrepair():
877 query['repairlink'] = '1'
878 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
881 # delete upload revision
883 query = { 'cmd': 'deleteuploadrev' }
884 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
890 root = ET.parse(f).getroot()
891 self.rev = int(root.get('rev'))
893 print 'Committed revision %s.' % self.rev
896 os.unlink(os.path.join(self.storedir, '_pulled'))
897 if self.islinkrepair():
898 os.unlink(os.path.join(self.storedir, '_linkrepair'))
899 self.linkrepair = False
900 # XXX: mark package as invalid?
901 print 'The source link has been repaired. This directory can now be removed.'
902 if self.islink() and self.isexpanded():
903 self.update_local_filesmeta(revision=self.latest_rev())
905 self.update_local_filesmeta()
906 self.write_deletelist()
907 self.update_datastructs()
909 if self.filenamelist.count('_service'):
910 print 'The package contains a source service.'
911 for filename in self.todo:
912 if filename.startswith('_service:') and os.path.exists(filename):
913 os.unlink(filename) # remove local files
914 print_request_list(self.apiurl, self.prjname, self.name)
916 def write_conflictlist(self):
917 if len(self.in_conflict) == 0:
919 os.unlink(os.path.join(self.storedir, '_in_conflict'))
923 fname = os.path.join(self.storedir, '_in_conflict')
925 f.write('\n'.join(self.in_conflict))
929 def updatefile(self, n, revision):
930 filename = os.path.join(self.dir, n)
931 storefilename = os.path.join(self.storedir, n)
932 mtime = self.findfilebyname(n).mtime
934 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
935 os.utime(filename, (-1, mtime))
937 shutil.copyfile(filename, storefilename)
939 def mergefile(self, n):
940 filename = os.path.join(self.dir, n)
941 storefilename = os.path.join(self.storedir, n)
942 myfilename = os.path.join(self.dir, n + '.mine')
943 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
944 os.rename(filename, myfilename)
946 mtime = self.findfilebyname(n).mtime
947 get_source_file(self.apiurl, self.prjname, self.name, n,
948 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
949 os.utime(upfilename, (-1, mtime))
951 if binary_file(myfilename) or binary_file(upfilename):
953 shutil.copyfile(upfilename, filename)
954 shutil.copyfile(upfilename, storefilename)
955 self.in_conflict.append(n)
956 self.write_conflictlist()
960 # diff3 OPTIONS... MINE OLDER YOURS
961 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
962 # the command is run through the shell because merge_cmd contains an output redirection
963 ret = subprocess.call(merge_cmd, shell=True)
965 # "An exit status of 0 means `diff3' was successful, 1 means some
966 # conflicts were found, and 2 means trouble."
968 # merge was successful... clean up
969 shutil.copyfile(upfilename, storefilename)
970 os.unlink(upfilename)
971 os.unlink(myfilename)
975 shutil.copyfile(upfilename, storefilename)
976 self.in_conflict.append(n)
977 self.write_conflictlist()
980 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
981 print >>sys.stderr, 'the command line was:'
982 print >>sys.stderr, merge_cmd
987 def update_local_filesmeta(self, revision=None):
989 Update the local _files file in the store.
990 It is replaced with the version pulled from upstream.
992 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
993 store_write_string(self.absdir, '_files', meta)
995 def update_datastructs(self):
997 Update the internal data structures if the local _files
998 file has changed (e.g. update_local_filesmeta() has been
1002 files_tree = read_filemeta(self.dir)
1003 files_tree_root = files_tree.getroot()
1005 self.rev = files_tree_root.get('rev')
1006 self.srcmd5 = files_tree_root.get('srcmd5')
1008 self.linkinfo = Linkinfo()
1009 self.linkinfo.read(files_tree_root.find('linkinfo'))
1011 self.filenamelist = []
1013 for node in files_tree_root.findall('entry'):
1015 f = File(node.get('name'),
1017 int(node.get('size')),
1018 int(node.get('mtime')))
1020 # okay, a very old version of _files, which didn't contain any metadata yet...
1021 f = File(node.get('name'), '', 0, 0)
1022 self.filelist.append(f)
1023 self.filenamelist.append(f.name)
1025 self.to_be_deleted = read_tobedeleted(self.dir)
1026 self.in_conflict = read_inconflict(self.dir)
1027 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1029 # gather unversioned files, but ignore some stuff
1030 self.excluded = [ i for i in os.listdir(self.dir)
1031 for j in conf.config['exclude_glob']
1032 if fnmatch.fnmatch(i, j) ]
1033 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1034 if i not in self.excluded
1035 if i not in self.filenamelist ]
1038 """tells us if the package is a link (has 'linkinfo').
1039 A package with linkinfo is a package which links to another package.
1040 Returns True if the package is a link, otherwise False."""
1041 return self.linkinfo.islink()
1043 def isexpanded(self):
1044 """tells us if the package is a link which is expanded.
1045 Returns True if the package is expanded, otherwise False."""
1046 return self.linkinfo.isexpanded()
1048 def islinkrepair(self):
1049 """tells us if we are repairing a broken source link."""
1050 return self.linkrepair
1053 """tells us if we have pulled a link."""
1054 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1057 """tells us if the link is frozen."""
1058 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1060 def get_pulled_srcmd5(self):
1062 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1063 pulledrev = line.strip()
1066 def haslinkerror(self):
1068 Returns True if the link is broken otherwise False.
1069 If the package is not a link it returns False.
1071 return self.linkinfo.haserror()
1073 def linkerror(self):
1075 Returns an error message if the link is broken otherwise None.
1076 If the package is not a link it returns None.
1078 return self.linkinfo.error
1080 def update_local_pacmeta(self):
1082 Update the local _meta file in the store.
1083 It is replaced with the version pulled from upstream.
1085 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1086 store_write_string(self.absdir, '_meta', meta)
1088 def findfilebyname(self, n):
1089 for i in self.filelist:
1093 def status(self, n):
1097  file   storefile  file present   STATUS
1098 exists   exists      in _files
1101    x        x            x        ' '  if digest differs: 'M'
1102                                        and if in conflicts file: 'C'
1104    x        -            x        'D'  and listed in _to_be_deleted
1106    -        x            -        'D'  (when file in working copy is already deleted)
1107    -        -            x        'F'  (new in repo, but not yet in working copy)
1112 known_by_meta = False
1114 exists_in_store = False
1115 if n in self.filenamelist:
1116 known_by_meta = True
1117 if os.path.exists(os.path.join(self.absdir, n)):
1119 if os.path.exists(os.path.join(self.storedir, n)):
1120 exists_in_store = True
1123 if exists and not exists_in_store and known_by_meta:
1125 elif n in self.to_be_deleted:
1127 elif n in self.in_conflict:
1129 elif exists and exists_in_store and known_by_meta:
1130 #print self.findfilebyname(n)
1131 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1135 elif exists and not exists_in_store and not known_by_meta:
1137 elif exists and exists_in_store and not known_by_meta:
1139 elif not exists and exists_in_store and known_by_meta:
1141 elif not exists and not exists_in_store and known_by_meta:
1143 elif not exists and exists_in_store and not known_by_meta:
1145 elif not exists and not exists_in_store and not known_by_meta:
1146 # this case shouldn't happen (unless there was a typo in the filename etc.)
1147 raise IOError('osc: \'%s\' is not under version control' % n)
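# Illustrative usage (the file name is a placeholder), assuming the current
# working directory is a package working copy:
#   p = Package('.')
#   p.status('hello.spec')   # -> 'M' if the file differs from the md5 recorded
#                            #    in _files, '?' if it is not under version control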
1151 def comparePac(self, cmp_pac):
1153 This method compares the local filelist with
1154 the filelist of the passed package to see which files
1155 were added, removed and changed.
1162 for file in self.filenamelist+self.filenamelist_unvers:
1163 state = self.status(file)
1164 if state == 'A' and (not file in cmp_pac.filenamelist):
1165 added_files.append(file)
1166 elif file in cmp_pac.filenamelist and state == 'D':
1167 removed_files.append(file)
1168 elif state == ' ' and not file in cmp_pac.filenamelist:
1169 added_files.append(file)
1170 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1171 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1172 changed_files.append(file)
1173 for file in cmp_pac.filenamelist:
1174 if not file in self.filenamelist:
1175 removed_files.append(file)
1176 removed_files = set(removed_files)
1178 return changed_files, added_files, removed_files
1180 def merge(self, otherpac):
1181 self.todo += otherpac.todo
1195 '\n '.join(self.filenamelist),
1203 def read_meta_from_spec(self, spec = None):
1208 # scan for spec files
1209 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1210 if len(speclist) == 1:
1211 specfile = speclist[0]
1212 elif len(speclist) > 1:
1213 print 'the following specfiles were found:'
1214 for file in speclist:
1216 print 'please specify one with --specfile'
1219 print 'no specfile was found - please specify one ' \
1223 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1224 self.summary = data['Summary']
1225 self.url = data['Url']
1226 self.descr = data['%description']
1229 def update_package_meta(self, force=False):
1231 for the updatepacmetafromspec subcommand
1232 the force argument suppresses the confirmation question
1235 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1237 root = ET.fromstring(m)
1238 root.find('title').text = self.summary
1239 root.find('description').text = ''.join(self.descr)
1240 url = root.find('url')
1242 url = ET.SubElement(root, 'url')
1245 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1246 mf = metafile(u, ET.tostring(root))
1249 print '*' * 36, 'old', '*' * 36
1251 print '*' * 36, 'new', '*' * 36
1252 print ET.tostring(root)
1254 repl = raw_input('Write? (y/N/e) ')
1265 def mark_frozen(self):
1266 store_write_string(self.absdir, '_frozenlink', '')
1268 print "The link in this package is currently broken. Checking"
1269 print "out the last working version instead; please use 'osc pull'"
1270 print "to repair the link."
1273 def unmark_frozen(self):
1274 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1275 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1277 def latest_rev(self):
1278 if self.islinkrepair():
1279 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1280 elif self.islink() and self.isexpanded():
1281 if self.isfrozen() or self.ispulled():
1282 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1285 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1288 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1290 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1293 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1296 def update(self, rev = None, service_files = False):
1297 # save filelist and (modified) status before replacing the meta file
1298 saved_filenames = self.filenamelist
1299 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1302 self.update_local_filesmeta(rev)
1303 self = Package(self.dir, progress_obj=self.progress_obj)
1305 # which files no longer exist upstream?
1306 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1308 pathn = getTransActPath(self.dir)
1310 for filename in saved_filenames:
1311 if not filename.startswith('_service:') and filename in disappeared:
1312 print statfrmt('D', os.path.join(pathn, filename))
1313 # keep file if it has local modifications
1314 if oldp.status(filename) == ' ':
1315 self.delete_localfile(filename)
1316 self.delete_storefile(filename)
1318 for filename in self.filenamelist:
1320 state = self.status(filename)
1321 if not service_files and filename.startswith('_service:'):
1323 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1324 # no merge necessary... local file is changed, but upstream isn't
1326 elif state == 'M' and filename in saved_modifiedfiles:
1327 status_after_merge = self.mergefile(filename)
1328 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1330 self.updatefile(filename, rev)
1331 print statfrmt('U', os.path.join(pathn, filename))
1333 self.updatefile(filename, rev)
1334 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1336 self.updatefile(filename, rev)
1337 print statfrmt('A', os.path.join(pathn, filename))
1338 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1339 self.updatefile(filename, rev)
1340 self.delete_storefile(filename)
1341 print statfrmt('U', os.path.join(pathn, filename))
1345 self.update_local_pacmeta()
1347 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1348 print 'At revision %s.' % self.rev
1350 if not service_files:
1351 self.run_source_services()
1353 def run_source_services(self):
1354 if self.filenamelist.count('_service'):
1355 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1358 si.execute(self.absdir)
1360 def prepare_filelist(self):
1361 """Prepare a list of files, which will be processed by process_filelist
1362 method. This allows easy modifications of a file list in commit
1366 self.todo = self.filenamelist + self.filenamelist_unvers
1370 for f in (f for f in self.todo if not os.path.isdir(f)):
1372 status = self.status(f)
1375 ret += "%s %s %s\n" % (action, status, f)
1378 # Edit a filelist for package %s
1380 # l, leave = leave a file as is
1381 # r, remove = remove a file
1382 # a, add = add a file
1384 # If you remove a file from the list, it will be left unchanged
1385 # If you remove all files, the commit will be aborted"""
1389 def edit_filelist(self):
1390 """Opens a package list in editor for eediting. This allows easy
1391 modifications of it just by simple text editing
1395 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1396 f = os.fdopen(fd, 'w')
1397 f.write(self.prepare_filelist())
1399 mtime_orig = os.stat(filename).st_mtime
1401 if sys.platform[:3] != 'win':
1402 editor = os.getenv('EDITOR', default='vim')
1404 editor = os.getenv('EDITOR', default='notepad')
1406 subprocess.call('%s %s' % (editor, filename), shell=True)
1407 mtime = os.stat(filename).st_mtime
1408 if mtime_orig < mtime:
1409 filelist = open(filename).readlines()
1413 raise oscerr.UserAbort()
1415 return self.process_filelist(filelist)
1417 def process_filelist(self, filelist):
1418 """Process a filelist - it add/remove or leave files. This depends on
1419 user input. If no file is processed, it raises an ValueError
1423 for line in (l.strip() for l in filelist if (l[0] != "#" or l.strip() != '')):
1425 foo = line.split(' ')
1427 action, state, name = (foo[0], ' ', foo[3])
1429 action, state, name = (foo[0], foo[1], foo[2])
1432 action = action.lower()
1435 if action in ('r', 'remove'):
1436 if self.status(name) == '?':
1438 if name in self.todo:
1439 self.todo.remove(name)
1441 self.delete_file(name, True)
1442 elif action in ('a', 'add'):
1443 if self.status(name) != '?':
1444 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1447 elif action in ('l', 'leave'):
1450 raise ValueError("Unknown action `%s'" % action)
1453 raise ValueError("Empty filelist")
1456 """for objects to represent the review state in a request"""
1457 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1459 self.by_user = by_user
1460 self.by_group = by_group
1463 self.comment = comment
1466 """for objects to represent the "state" of a request"""
1467 def __init__(self, name=None, who=None, when=None, comment=None):
1471 self.comment = comment
1474 """represents an action"""
1475 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1477 self.src_project = src_project
1478 self.src_package = src_package
1479 self.src_rev = src_rev
1480 self.dst_project = dst_project
1481 self.dst_package = dst_package
1482 self.src_update = src_update
1485 """represent a request and holds its metadata
1486 it has methods to read in metadata from xml,
1487 different views, ..."""
1490 self.state = RequestState()
1493 self.last_author = None
1496 self.statehistory = []
1499 def read(self, root):
1500 self.reqid = int(root.get('id'))
1501 actions = root.findall('action')
1502 if len(actions) == 0:
1503 actions = [ root.find('submit') ] # for old style requests
1505 for action in actions:
1506 type = action.get('type', 'submit')
1508 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1509 if action.findall('source'):
1510 n = action.find('source')
1511 src_prj = n.get('project', None)
1512 src_pkg = n.get('package', None)
1513 src_rev = n.get('rev', None)
1514 if action.findall('target'):
1515 n = action.find('target')
1516 dst_prj = n.get('project', None)
1517 dst_pkg = n.get('package', None)
1518 if action.findall('options'):
1519 n = action.find('options')
1520 if n.findall('sourceupdate'):
1521 src_update = n.find('sourceupdate').text.strip()
1522 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1524 msg = 'invalid request format:\n%s' % ET.tostring(root)
1525 raise oscerr.APIError(msg)
1528 n = root.find('state')
1529 self.state.name, self.state.who, self.state.when \
1530 = n.get('name'), n.get('who'), n.get('when')
1532 self.state.comment = n.find('comment').text.strip()
1534 self.state.comment = None
1536 # read the review states
1537 for r in root.findall('review'):
1539 s.state = r.get('state')
1540 s.by_user = r.get('by_user')
1541 s.by_group = r.get('by_group')
1542 s.who = r.get('who')
1543 s.when = r.get('when')
1545 s.comment = r.find('comment').text.strip()
1548 self.reviews.append(s)
1550 # read the state history
1551 for h in root.findall('history'):
1553 s.name = h.get('name')
1554 s.who = h.get('who')
1555 s.when = h.get('when')
1557 s.comment = h.find('comment').text.strip()
1560 self.statehistory.append(s)
1561 self.statehistory.reverse()
1563 # read a description, if it exists
1565 n = root.find('description').text
1570 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1571 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1572 dst_prj, dst_pkg, src_update)
1575 def list_view(self):
1576 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1578 for a in self.actions:
1579 dst = "%s/%s" % (a.dst_project, a.dst_package)
1580 if a.src_package == a.dst_package:
1584 if a.type=="submit":
1585 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1586 if a.type=="change_devel":
1587 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1588 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1590 ret += '\n %s: %-50s %-20s ' % \
1591 (a.type, sr_source, dst)
1593 if self.statehistory and self.statehistory[0]:
1595 for h in self.statehistory:
1596 who.append("%s(%s)" % (h.who,h.name))
1598 ret += "\n From: %s" % (' -> '.join(who))
1600 txt = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]', '_', self.descr)  # mask control characters (Python's re has no POSIX [:isprint:] class)
1602 lines = txt.splitlines()
1603 wrapper = textwrap.TextWrapper( width = 80,
1604 initial_indent=' Descr: ',
1605 subsequent_indent=' ')
1606 ret += "\n" + wrapper.fill(lines[0])
1607 wrapper.initial_indent = ' '
1608 for line in lines[1:]:
1609 ret += "\n" + wrapper.fill(line)
1615 def __cmp__(self, other):
1616 return cmp(self.reqid, other.reqid)
1620 for action in self.actions:
1621 action_list=" %s: " % (action.type)
1622 if action.type=="submit":
1625 r="(r%s)" % (action.src_rev)
1627 if action.src_update:
1628 m="(%s)" % (action.src_update)
1629 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1630 if action.dst_package:
1631 action_list=action_list+"/%s" % ( action.dst_package )
1632 elif action.type=="delete":
1633 action_list=action_list+" %s" % ( action.dst_project )
1634 if action.dst_package:
1635 action_list=action_list+"/%s" % ( action.dst_package )
1636 elif action.type=="change_devel":
1637 action_list=action_list+" %s/%s developed in %s/%s" % \
1638 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1639 action_list=action_list+"\n"
1654 self.state.name, self.state.when, self.state.who,
1657 if len(self.reviews):
1658 reviewitems = [ '%-10s %s %s %s %s %s' \
1659 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1660 for i in self.reviews ]
1661 s += '\nReview: ' + '\n '.join(reviewitems)
1664 if len(self.statehistory):
1665 histitems = [ '%-10s %s %s' \
1666 % (i.name, i.when, i.who) \
1667 for i in self.statehistory ]
1668 s += '\nHistory: ' + '\n '.join(histitems)
1675 """format time as Apr 02 18:19
1677 depending on whether it is in the current year
1681 if time.localtime()[0] == time.localtime(t)[0]:
1683 return time.strftime('%b %d %H:%M',time.localtime(t))
1685 return time.strftime('%b %d %Y',time.localtime(t))
1688 def is_project_dir(d):
1689 return os.path.exists(os.path.join(d, store, '_project')) and not \
1690 os.path.exists(os.path.join(d, store, '_package'))
1693 def is_package_dir(d):
1694 return os.path.exists(os.path.join(d, store, '_project')) and \
1695 os.path.exists(os.path.join(d, store, '_package'))
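# In other words: a project working copy contains only <store>/_project, while a
# package working copy contains both <store>/_project and <store>/_package
# (store being the '.osc' metadata directory).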
1697 def parse_disturl(disturl):
1698 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1699 revision), else raises an oscerr.WrongArgs exception
1702 m = DISTURL_RE.match(disturl)
1704 raise oscerr.WrongArgs("`%s' does not look like disturl" % disturl)
1706 apiurl = m.group('apiurl')
1707 if apiurl.split('.')[0] != 'api':
1708 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1709 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
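# Illustrative example (the URL and names are made up): parse_disturl(
#   'obs://build.opensuse.org/openSUSE:Factory/standard/MD5SUM-hello')
# would return
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'hello', 'standard', 'MD5SUM')
# since the first host label is rewritten to 'api' when it is not already 'api'.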
1711 def parse_buildlogurl(buildlogurl):
1712 """Parse a build log url, returns a tuple (apiurl, project, package,
1713 repository, arch), else raises oscerr.WrongArgs exception"""
1715 global BUILDLOGURL_RE
1717 m = BUILDLOGURL_RE.match(buildlogurl)
1719 raise oscerr.WrongArgs('\'%s\' does not look like url with a build log' % buildlogurl)
1721 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
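# Illustrative example (the URL is made up): parse_buildlogurl(
#   'https://api.opensuse.org/build/openSUSE:Factory/standard/x86_64/hello/_log')
# would return
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'hello', 'standard', 'x86_64')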
1724 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1725 This is handy to allow copy/pasting a project/package combination in this form.
1727 Trailing slashes are removed before the split, because the split would
1728 otherwise give an additional empty string.
1736 def expand_proj_pack(args, idx=0, howmany=0):
1737 """looks for occurance of '.' at the position idx.
1738 If howmany is 2, both proj and pack are expanded together
1739 using the current directory, or none of them, if not possible.
1740 If howmany is 0, proj is expanded if possible, then, if there
1741 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1742 expanded, if possible.
1743 If howmany is 1, only proj is expanded if possible.
1745 If args[idx] does not exist, an implicit '.' is assumed.
1746 If not enough elements up to idx exist, an error is raised.
1748 See also parseargs(args), slash_split(args), findpacs(args)
1749 All these need unification, somehow.
1752 # print args,idx,howmany
1755 raise oscerr.WrongArgs('not enough arguments, expected at least %d' % idx)
1757 if len(args) == idx:
1759 if args[idx+0] == '.':
1760 if howmany == 0 and len(args) > idx+1:
1761 if args[idx+1] == '.':
1763 # remove one dot and make sure to expand both proj and pack
1768 # print args,idx,howmany
1770 args[idx+0] = store_read_project('.')
1773 package = store_read_package('.')
1774 args.insert(idx+1, package)
1778 package = store_read_package('.')
1779 args.insert(idx+1, package)
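# Rough illustration (project/package names are placeholders), assuming the
# current directory is a checkout of package 'hello' in project 'home:user':
#   expand_proj_pack(['.'])            -> ['home:user', 'hello']
#   expand_proj_pack(['.'], howmany=1) -> ['home:user']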
1783 def findpacs(files, progress_obj=None):
1784 """collect Package objects belonging to the given files
1785 and make sure each Package is returned only once"""
1788 p = filedir_to_pac(f, progress_obj)
1791 if i.name == p.name:
1801 def read_filemeta(dir):
1803 r = ET.parse(os.path.join(dir, store, '_files'))
1804 except SyntaxError, e:
1805 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1806 'When parsing .osc/_files, the following error was encountered:\n'
1811 def read_tobedeleted(dir):
1813 fname = os.path.join(dir, store, '_to_be_deleted')
1815 if os.path.exists(fname):
1816 r = [ line.strip() for line in open(fname) ]
1821 def read_inconflict(dir):
1823 fname = os.path.join(dir, store, '_in_conflict')
1825 if os.path.exists(fname):
1826 r = [ line.strip() for line in open(fname) ]
1831 def parseargs(list_of_args):
1832 """Convenience method osc's commandline argument parsing.
1834 If called with an empty tuple (or list), return a list containing the current directory.
1835 Otherwise, return a list of the arguments."""
1837 return list(list_of_args)
1842 def filedir_to_pac(f, progress_obj=None):
1843 """Takes a working copy path, or a path to a file inside a working copy,
1844 and returns a Package object instance
1846 If the argument was a filename, add it onto the "todo" list of the Package """
1848 if os.path.isdir(f):
1850 p = Package(wd, progress_obj=progress_obj)
1853 wd = os.path.dirname(f)
1856 p = Package(wd, progress_obj=progress_obj)
1857 p.todo = [ os.path.basename(f) ]
1862 def statfrmt(statusletter, filename):
1863 return '%s %s' % (statusletter, filename)
1866 def pathjoin(a, *p):
1867 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1868 path = os.path.join(a, *p)
1869 if path.startswith('./'):
1874 def makeurl(baseurl, l, query=[]):
1875 """Given a list of path compoments, construct a complete URL.
1877 Optional parameters for a query string can be given as a list, as a
1878 dictionary, or as an already assembled string.
1879 In case of a dictionary, the parameters will be urlencoded by this
1880 function. In case of a list they will not be -- this is to be backwards compatible.
1883 if conf.config['verbose'] > 1:
1884 print 'makeurl:', baseurl, l, query
1886 if type(query) == type(list()):
1887 query = '&'.join(query)
1888 elif type(query) == type(dict()):
1889 query = urlencode(query)
1891 scheme, netloc = urlsplit(baseurl)[0:2]
1892 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
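# Illustrative usage (the apiurl and names are placeholders):
#   makeurl('https://api.opensuse.org', ['source', 'home:user', 'hello'],
#           query={'rev': 'latest'})
#   -> 'https://api.opensuse.org/source/home:user/hello?rev=latest'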
1895 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1896 """wrapper around urllib2.urlopen for error handling,
1897 and to support additional (PUT, DELETE) methods"""
1901 if conf.config['http_debug']:
1904 print '--', method, url
1906 if method == 'POST' and not file and not data:
1907 # adding data to an urllib2 request transforms it into a POST
1910 req = urllib2.Request(url)
1912 api_host_options=conf.get_apiurl_api_host_options(url)
1914 for header, value in api_host_options['http_headers']:
1915 req.add_header(header, value)
1917 req.get_method = lambda: method
1919 # POST requests are application/x-www-form-urlencoded per default
1920 # since we change the request into PUT, we also need to adjust the content type header
1921 if method == 'PUT' or (method == 'POST' and data):
1922 req.add_header('Content-Type', 'application/octet-stream')
1924 if type(headers) == type({}):
1925 for i in headers.keys():
1927 req.add_header(i, headers[i])
1929 if file and not data:
1930 size = os.path.getsize(file)
1932 data = open(file, 'rb').read()
1935 filefd = open(file, 'rb')
1937 if sys.platform[:3] != 'win':
1938 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1940 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1942 except EnvironmentError, e:
1944 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1945 '\non a filesystem which does not support this.' % (e, file))
1946 elif hasattr(e, 'winerror') and e.winerror == 5:
1947 # falling back to the default io
1948 data = open(file, 'rb').read()
1952 if conf.config['debug']: print method, url
1954 old_timeout = socket.getdefaulttimeout()
1955 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1956 if old_timeout != timeout and not api_host_options['sslcertck']:
1957 socket.setdefaulttimeout(timeout)
1959 fd = urllib2.urlopen(req, data=data)
1961 if old_timeout != timeout and not api_host_options['sslcertck']:
1962 socket.setdefaulttimeout(old_timeout)
1963 if hasattr(conf.cookiejar, 'save'):
1964 conf.cookiejar.save(ignore_discard=True)
1966 if filefd: filefd.close()
1971 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1972 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1973 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1974 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
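# Typical usage sketch (mirrors e.g. meta_get_packagelist below; the project
# name is a placeholder): the helpers return the file-like object from
# urllib2.urlopen, so responses are usually fed straight into ElementTree:
#   f = http_GET(makeurl(apiurl, ['source', 'home:user']))
#   root = ET.parse(f).getroot()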
1977 def init_project_dir(apiurl, dir, project):
1978 if not os.path.exists(dir):
1979 if conf.config['checkout_no_colon']:
1980 os.makedirs(dir) # helpful with checkout_no_colon
1983 if not os.path.exists(os.path.join(dir, store)):
1984 os.mkdir(os.path.join(dir, store))
1986 # print 'project=',project,' dir=',dir
1987 store_write_project(dir, project)
1988 store_write_apiurl(dir, apiurl)
1989 if conf.config['do_package_tracking']:
1990 store_write_initial_packages(dir, project, [])
1992 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
1993 if not os.path.isdir(store):
1996 f = open('_project', 'w')
1997 f.write(project + '\n')
1999 f = open('_package', 'w')
2000 f.write(package + '\n')
2004 f = open('_files', 'w')
2005 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2009 ET.ElementTree(element=ET.Element('directory')).write('_files')
2011 f = open('_osclib_version', 'w')
2012 f.write(__store_version__ + '\n')
2015 store_write_apiurl(os.path.pardir, apiurl)
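# After init_package_dir the store directory (normally '.osc') contains at
# least _project, _package, _files, _osclib_version and the apiurl file
# written by store_write_apiurl for the freshly initialized working copy.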
2021 def check_store_version(dir):
2022 versionfile = os.path.join(dir, store, '_osclib_version')
2024 v = open(versionfile).read().strip()
2029 msg = 'Error: "%s" is not an osc working copy.' % os.path.abspath(dir)
2030 if os.path.exists(os.path.join(dir, '.svn')):
2031 msg = msg + '\nTry svn instead of osc.'
2032 raise oscerr.NoWorkingCopy(msg)
2034 if v != __store_version__:
2035 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2036 # version is fine, no migration needed
2037 f = open(versionfile, 'w')
2038 f.write(__store_version__ + '\n')
2041 msg = 'The osc metadata of your working copy "%s"' % dir
2042 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2043 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2044 raise oscerr.WorkingCopyWrongVersion, msg
2047 def meta_get_packagelist(apiurl, prj):
2049 u = makeurl(apiurl, ['source', prj])
2051 root = ET.parse(f).getroot()
2052 return [ node.get('name') for node in root.findall('entry') ]
2055 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2056 """return a list of file names,
2057 or a list of File() instances if verbose=True"""
2063 query['rev'] = revision
2065 query['rev'] = 'latest'
2067 u = makeurl(apiurl, ['source', prj, package], query=query)
2069 root = ET.parse(f).getroot()
2072 return [ node.get('name') for node in root.findall('entry') ]
2076 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2077 rev = root.get('rev')
2078 for node in root.findall('entry'):
2079 f = File(node.get('name'),
2081 int(node.get('size')),
2082 int(node.get('mtime')))
2088 def meta_get_project_list(apiurl):
2089 u = makeurl(apiurl, ['source'])
2091 root = ET.parse(f).getroot()
2092 return sorted([ node.get('name') for node in root ])
2095 def show_project_meta(apiurl, prj):
2096 url = makeurl(apiurl, ['source', prj, '_meta'])
2098 return f.readlines()
2101 def show_project_conf(apiurl, prj):
2102 url = makeurl(apiurl, ['source', prj, '_config'])
2104 return f.readlines()
2107 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2108 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2112 except urllib2.HTTPError, e:
2113 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2117 def show_package_meta(apiurl, prj, pac):
2118 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2121 return f.readlines()
2122 except urllib2.HTTPError, e:
2123 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2127 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2129 path.append('source')
2135 path.append('_attribute')
2137 path.append(attribute)
2140 query.append("with_default=1")
2142 query.append("with_project=1")
2143 url = makeurl(apiurl, path, query)
2146 return f.readlines()
2147 except urllib2.HTTPError, e:
2148 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2152 def show_develproject(apiurl, prj, pac):
2153 m = show_package_meta(apiurl, prj, pac)
2155 return ET.fromstring(''.join(m)).find('devel').get('project')
2160 def show_pattern_metalist(apiurl, prj):
2161 url = makeurl(apiurl, ['source', prj, '_pattern'])
2165 except urllib2.HTTPError, e:
2166 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2168 r = [ node.get('name') for node in tree.getroot() ]
2173 def show_pattern_meta(apiurl, prj, pattern):
2174 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2177 return f.readlines()
2178 except urllib2.HTTPError, e:
2179 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2184 """metafile that can be manipulated and is stored back after manipulation."""
2185 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2189 self.change_is_required = change_is_required
2190 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2191 f = os.fdopen(fd, 'w')
2192 f.write(''.join(input))
2194 self.hash_orig = dgst(self.filename)
2197 hash = dgst(self.filename)
2198 if self.change_is_required and hash == self.hash_orig:
2199 print 'File unchanged. Not saving.'
2200 os.unlink(self.filename)
2203 print 'Sending meta data...'
2204 # don't do any exception handling... it's up to the caller what to do in case
2206 http_PUT(self.url, file=self.filename)
2207 os.unlink(self.filename)
2211 if sys.platform[:3] != 'win':
2212 editor = os.getenv('EDITOR', default='vim')
2214 editor = os.getenv('EDITOR', default='notepad')
2217 subprocess.call('%s %s' % (editor, self.filename), shell=True)
2221 except urllib2.HTTPError, e:
2222 error_help = "%d" % e.code
2223 if e.headers.get('X-Opensuse-Errorcode'):
2224 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2226 print >>sys.stderr, 'BuildService API error:', error_help
2227 # examine the error - we can't raise an exception because we might want
2230 if '<summary>' in data:
2231 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2232 input = raw_input('Try again? ([y/N]): ')
2233 if input not in ['y', 'Y']:
2239 if os.path.exists(self.filename):
2240 print 'discarding %s' % self.filename
2241 os.unlink(self.filename)
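# Rough usage sketch for the metafile helper above (only attributes visible in
# this excerpt are used; url and data are whatever edit_meta() below passes in):
#
#   mf = metafile(url, data, change_is_required=True)
#   # the editable copy lives in mf.filename; after the user edits it with
#   # $EDITOR it is PUT back to mf.url via http_PUT(), as shown above, and the
#   # upload is skipped when change_is_required is set but the digest is unchanged.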
2244 # different types of metadata
2245 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2246 'template': new_project_templ,
2249 'pkg': { 'path' : 'source/%s/%s/_meta',
2250 'template': new_package_templ,
2253 'attribute': { 'path' : 'source/%s/%s/_meta',
2254 'template': new_attribute_templ,
2257 'prjconf': { 'path': 'source/%s/_config',
2261 'user': { 'path': 'person/%s',
2262 'template': new_user_template,
2265 'pattern': { 'path': 'source/%s/_pattern/%s',
2266 'template': new_pattern_template,
2271 def meta_exists(metatype,
2278 apiurl = conf.config['apiurl']
2279 url = make_meta_url(metatype, path_args, apiurl)
2281 data = http_GET(url).readlines()
2282 except urllib2.HTTPError, e:
2283 if e.code == 404 and create_new:
2284 data = metatypes[metatype]['template']
2286 data = StringIO(data % template_args).readlines()
2291 def make_meta_url(metatype, path_args=None, apiurl=None):
2293 apiurl = conf.config['apiurl']
2294 if metatype not in metatypes:
2295 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2296 path = metatypes[metatype]['path']
2299 path = path % path_args
2301 return makeurl(apiurl, [path])
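# Example of how the metatypes table and make_meta_url() compose (apiurl is
# hypothetical; the URL shape follows from makeurl() joining apiurl and path):
#
#   make_meta_url('pkg', (quote_plus('home:user'), quote_plus('hello')),
#                 'https://api.example.org')
#   # -> 'https://api.example.org/source/home:user/hello/_meta'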
2304 def edit_meta(metatype,
2309 change_is_required=False,
2313 apiurl = conf.config['apiurl']
2315 data = meta_exists(metatype,
2318 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2322 change_is_required = True
2324 url = make_meta_url(metatype, path_args, apiurl)
2325 f = metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2333 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
2336 query['rev'] = revision
2338 query['rev'] = 'latest'
2340 query['linkrev'] = linkrev
2341 elif conf.config['linkcontrol']:
2342 query['linkrev'] = 'base'
2346 query['emptylink'] = 1
2347 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2348 return f.readlines()
2351 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2352 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2353 return ET.fromstring(''.join(m)).get('srcmd5')
2356 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2357 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2359 # only source link packages have a <linkinfo> element.
2360 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2368 raise oscerr.LinkExpandError(prj, pac, li.error)
2372 def show_upstream_rev(apiurl, prj, pac):
2373 m = show_files_meta(apiurl, prj, pac)
2374 return ET.fromstring(''.join(m)).get('rev')
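# The show_upstream_* helpers are thin wrappers around show_files_meta();
# a usage sketch with hypothetical names:
#
#   show_upstream_rev(apiurl, 'home:user', 'hello')     # e.g. '7'
#   show_upstream_srcmd5(apiurl, 'home:user', 'hello')  # e.g. 'd41d8cd9...'
#   # show_upstream_xsrcmd5() is only meaningful for source link packages,
#   # see the <linkinfo> handling above.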
2377 def read_meta_from_spec(specfile, *args):
2378 import codecs, locale, re
2380 Read tags and sections from spec file. To read out
2381 a tag the passed argument mustn't end with a colon. To
2382 read out a section the passed argument must start with
2384 This method returns a dictionary which contains the
2388 if not os.path.isfile(specfile):
2389 raise IOError('\'%s\' is not a regular file' % specfile)
2392 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2393 except UnicodeDecodeError:
2394 lines = open(specfile).readlines()
2401 if itm.startswith('%'):
2402 sections.append(itm)
2406 tag_pat = r'(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2408 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2409 if m and m.group('val'):
2410 spec_data[tag] = m.group('val').strip()
2412 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2415 section_pat = r'^%s\s*?$'
2416 for section in sections:
2417 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2419 start = lines.index(m.group()+'\n') + 1
2421 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2424 for line in lines[start:]:
2425 if line.startswith('%'):
2428 spec_data[section] = data
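# Usage sketch for read_meta_from_spec() with a hypothetical spec file: tags
# are passed without the trailing colon, sections with their leading '%':
#
#   data = read_meta_from_spec('hello.spec', 'Name', 'Version', '%description')
#   data['Name']          # -> 'hello'
#   data['%description']  # -> the section's lines, up to the next '%' section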
2433 def edit_message(footer='', template=''):
2434 delim = '--This line, and those below, will be ignored--\n'
2436 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2437 f = os.fdopen(fd, 'w')
2446 if sys.platform[:3] != 'win':
2447 editor = os.getenv('EDITOR', default='vim')
2449 editor = os.getenv('EDITOR', default='notepad')
2452 subprocess.call('%s %s' % (editor, filename), shell=True)
2453 msg = open(filename).read().split(delim)[0].rstrip()
2458 input = raw_input('Log message not specified\n'
2459 'a)bort, c)ontinue, e)dit: ')
2461 raise oscerr.UserAbort()
2471 def create_delete_request(apiurl, project, package, message):
2476 package = """package="%s" """ % (package)
2482 <action type="delete">
2483 <target project="%s" %s/>
2486 <description>%s</description>
2488 """ % (project, package,
2489 cgi.escape(message or ''))
2491 u = makeurl(apiurl, ['request'], query='cmd=create')
2492 f = http_POST(u, data=xml)
2494 root = ET.parse(f).getroot()
2495 return root.get('id')
2498 def create_change_devel_request(apiurl,
2499 devel_project, devel_package,
2506 <action type="change_devel">
2507 <source project="%s" package="%s" />
2508 <target project="%s" package="%s" />
2511 <description>%s</description>
2513 """ % (devel_project,
2517 cgi.escape(message or ''))
2519 u = makeurl(apiurl, ['request'], query='cmd=create')
2520 f = http_POST(u, data=xml)
2522 root = ET.parse(f).getroot()
2523 return root.get('id')
2526 # This creates an old-style submit request for server API 1.0
2527 def create_submit_request(apiurl,
2528 src_project, src_package,
2529 dst_project=None, dst_package=None,
2530 message=None, orev=None, src_update=None):
2535 options_block="""<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2537 # Yes, this kind of xml construction is horrible
2542 packagexml = """package="%s" """ %( dst_package )
2543 targetxml = """<target project="%s" %s /> """ %( dst_project, packagexml )
2544 # XXX: keep the old template for now in order to work with old obs instances
2546 <request type="submit">
2548 <source project="%s" package="%s" rev="%s"/>
2553 <description>%s</description>
2557 orev or show_upstream_rev(apiurl, src_project, src_package),
2560 cgi.escape(message or ""))
2562 u = makeurl(apiurl, ['request'], query='cmd=create')
2563 f = http_POST(u, data=xml)
2565 root = ET.parse(f).getroot()
2566 return root.get('id')
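# Usage sketch (hypothetical project/package names); the server's reply is
# parsed above and the id of the freshly created request is returned:
#
#   reqid = create_submit_request(apiurl,
#                                 'home:user:branches:openSUSE:Factory', 'hello',
#                                 'openSUSE:Factory', 'hello',
#                                 message='update to 1.1')
#   print 'created request %s' % reqid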
2569 def get_request(apiurl, reqid):
2570 u = makeurl(apiurl, ['request', reqid])
2572 root = ET.parse(f).getroot()
2579 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2582 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2583 f = http_POST(u, data=message)
2586 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2589 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2590 f = http_POST(u, data=message)
2594 def get_request_list(apiurl, project='', package='', req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2596 if 'all' not in req_state:
2597 for state in req_state:
2598 xpath = xpath_join(xpath, 'state/@name=\'%s\'' % state, inner=True)
2600 xpath = xpath_join(xpath, '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': req_who}, op='and')
2602 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2606 todo['project'] = project
2608 todo['package'] = package
2609 for kind, val in todo.iteritems():
2610 xpath = xpath_join(xpath, '(action/target/@%(kind)s=\'%(val)s\' or ' \
2611 'action/source/@%(kind)s=\'%(val)s\' or ' \
2612 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2613 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}, op='and')
2615 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2616 for i in exclude_target_projects:
2617 xpath = xpath_join(xpath, '(not(action/target/@project=\'%(prj)s\' or ' \
2618 'submit/target/@project=\'%(prj)s\'))' % {'prj': i}, op='and')
2620 if conf.config['verbose'] > 1:
2621 print '[ %s ]' % xpath
2622 res = search(apiurl, request=xpath)
2623 collection = res['request']
2625 for root in collection.findall('request'):
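# For a call like get_request_list(apiurl, project='home:user', package='hello')
# the xpath assembled above ends up roughly like the following (illustration
# only; the exact grouping is produced by xpath_join):
#
#   state/@name='new' and (action/target/@project='home:user' or
#   action/source/@project='home:user' or ...) and
#   (action/target/@package='hello' or action/source/@package='hello' or ...)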
2631 def get_user_projpkgs_request_list(apiurl, user, req_state=('new',), req_type=None, exclude_projects=[], projpkgs={}):
2632 """Return all new requests for all projects/packages where is user is involved"""
2634 res = get_user_projpkgs(apiurl, user, exclude_projects=exclude_projects)
2635 for i in res['project_id'].findall('project'):
2636 projpkgs[i.get('name')] = []
2637 for i in res['package_id'].findall('package'):
2638 if i.get('project') not in projpkgs:
2639 projpkgs.setdefault(i.get('project'), []).append(i.get('name'))
2641 for prj, pacs in projpkgs.iteritems():
2643 xpath = xpath_join(xpath, 'action/target/@project=\'%s\'' % prj, inner=True)
2647 xp = xpath_join(xp, 'action/target/@package=\'%s\'' % p, inner=True)
2648 xp = xpath_join(xp, 'action/target/@project=\'%s\'' % prj, op='and')
2649 xpath = xpath_join(xpath, xp, inner=True)
2651 xpath = xpath_join(xpath, 'action/@type=\'%s\'' % req_type, op='and')
2652 if 'all' not in req_state:
2654 for state in req_state:
2655 xp = xpath_join(xp, 'state/@name=\'%s\'' % state, inner=True)
2656 xpath = xpath_join(xp, '(%s)' % xpath, op='and')
2657 res = search(apiurl, request=xpath)
2659 for root in res['request'].findall('request'):
2665 def get_request_log(apiurl, reqid):
2666 r = get_request(conf.config['apiurl'], reqid)
2668 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2669 # the description of the request is used for the initial log entry
2670 # otherwise its comment attribute would contain None
2671 if len(r.statehistory) >= 1:
2672 r.statehistory[-1].comment = r.descr
2674 r.state.comment = r.descr
2675 for state in [ r.state ] + r.statehistory:
2676 s = frmt % (state.name, state.who, state.when, str(state.comment))
2681 def get_user_meta(apiurl, user):
2682 u = makeurl(apiurl, ['person', quote_plus(user)])
2685 return ''.join(f.readlines())
2686 except urllib2.HTTPError:
2687 print 'user \'%s\' not found' % user
2691 def get_user_data(apiurl, user, *tags):
2692 """get specified tags from the user meta"""
2693 meta = get_user_meta(apiurl, user)
2696 root = ET.fromstring(meta)
2699 if root.find(tag).text is not None:
2700 data.append(root.find(tag).text)
2704 except AttributeError:
2705 # this part is reached if the tags tuple contains an invalid tag
2706 print 'The xml file for user \'%s\' seems to be broken' % user
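# Usage sketch for get_user_data() (user id and values are hypothetical; the
# tag names correspond to child elements of the person meta):
#
#   data = get_user_data(apiurl, 'alice', 'realname', 'email')
#   # -> e.g. ['Alice Example', 'alice@example.org']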
2711 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
2712 import tempfile, shutil
2715 query = { 'rev': revision }
2719 (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
2720 o = os.fdopen(fd, 'wb')
2721 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2722 for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
2725 shutil.move(tmpfile, targetfilename or filename)
2726 os.chmod(targetfilename or filename, 0644)
2734 def get_binary_file(apiurl, prj, repo, arch,
2737 target_filename = None,
2738 target_mtime = None,
2739 progress_meter = False):
2741 target_filename = target_filename or filename
2743 where = package or '_repository'
2744 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2747 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2751 binsize = int(f.headers['content-length'])
2754 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2755 os.chmod(tmpfilename, 0644)
2758 o = os.fdopen(fd, 'wb')
2762 #buf = f.read(BUFSIZE)
2766 downloaded += len(buf)
2768 completion = str(int((float(downloaded)/binsize)*100))
2769 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2774 sys.stdout.write('\n')
2776 shutil.move(tmpfilename, target_filename)
2778 os.utime(target_filename, (-1, target_mtime))
2780 # make sure that the temp file is cleaned up when we are interrupted
2782 try: os.unlink(tmpfilename)
2785 def dgst_from_string(str):
2786 # Python 2.5 deprecates the md5 module;
2787 # Python 2.4 doesn't have hashlib yet
2790 md5_hash = hashlib.md5()
2793 md5_hash = md5.new()
2794 md5_hash.update(str)
2795 return md5_hash.hexdigest()
2799 #if not os.path.exists(file):
2809 f = open(file, 'rb')
2811 buf = f.read(BUFSIZE)
2814 return s.hexdigest()
2819 """return true if a string is binary data using diff's heuristic"""
2820 if s and '\0' in s[:4096]:
2825 def binary_file(fn):
2826 """read 4096 bytes from a file named fn, and call binary() on the data"""
2827 return binary(open(fn, 'rb').read(4096))
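# The heuristic mirrors GNU diff: a NUL byte within the first 4 KiB marks the
# data as binary. Illustration (file name is hypothetical):
#
#   binary('just plain text\n')       # -> False
#   binary('\x7fELF\x00\x01...')      # -> True (NUL byte present)
#   binary_file('/tmp/some.tar.gz')   # checks the first 4096 bytes on disk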
2830 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2832 This method diffs oldfilename against filename (so filename will
2833 be shown as the new file).
2834 The variable origfilename is used if filename and oldfilename differ
2835 in their names (for instance if a tempfile is used for filename etc.)
2841 oldfilename = filename
2844 olddir = os.path.join(dir, store)
2846 if not origfilename:
2847 origfilename = filename
2849 file1 = os.path.join(olddir, oldfilename) # old/stored original
2850 file2 = os.path.join(dir, filename) # working copy
2852 f1 = open(file1, 'rb')
2856 f2 = open(file2, 'rb')
2860 if binary(s1) or binary(s2):
2861 d = ['Binary file %s has changed\n' % origfilename]
2864 d = difflib.unified_diff(\
2867 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2868 tofile = '%s\t(working copy)' % origfilename)
2870 # if a line lacks a trailing newline, append diff's "\ No newline at end of file" marker to the output
2872 for i, line in enumerate(d):
2873 if not line.endswith('\n'):
2874 d[i] += '\n\\ No newline at end of file'
2880 def make_diff(wc, revision):
2886 diff_hdr = 'Index: %s\n'
2887 diff_hdr += '===================================================================\n'
2889 olddir = os.getcwd()
2893 for file in wc.todo:
2894 if file in wc.filenamelist+wc.filenamelist_unvers:
2895 state = wc.status(file)
2897 added_files.append(file)
2899 removed_files.append(file)
2900 elif state == 'M' or state == 'C':
2901 changed_files.append(file)
2903 diff.append('osc: \'%s\' is not under version control' % file)
2905 for file in wc.filenamelist+wc.filenamelist_unvers:
2906 state = wc.status(file)
2907 if state == 'M' or state == 'C':
2908 changed_files.append(file)
2910 added_files.append(file)
2912 removed_files.append(file)
2914 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2916 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2917 cmp_pac = Package(tmpdir)
2919 for file in wc.todo:
2920 if file in cmp_pac.filenamelist:
2921 if file in wc.filenamelist:
2922 changed_files.append(file)
2924 diff.append('osc: \'%s\' is not under version control' % file)
2926 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2928 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2930 for file in changed_files:
2931 diff.append(diff_hdr % file)
2933 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2935 cmp_pac.updatefile(file, revision)
2936 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
2937 cmp_pac.absdir, file))
2938 (fd, tmpfile) = tempfile.mkstemp()
2939 for file in added_files:
2940 diff.append(diff_hdr % file)
2942 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
2943 os.path.dirname(tmpfile), file))
2945 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
2946 os.path.dirname(tmpfile), file))
2948 # FIXME: this is ugly but it cannot be avoided atm
2949 # if a file is deleted via "osc rm file" we should keep the storefile.
2951 if cmp_pac is None and removed_files:
2952 tmpdir = tempfile.mkdtemp()
2954 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
2955 tmp_pac = Package(tmpdir)
2958 for file in removed_files:
2959 diff.append(diff_hdr % file)
2961 tmp_pac.updatefile(file, tmp_pac.rev)
2962 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2963 wc.rev, file, tmp_pac.storedir, file))
2965 cmp_pac.updatefile(file, revision)
2966 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2967 revision, file, cmp_pac.storedir, file))
2971 delete_dir(cmp_pac.absdir)
2973 delete_dir(tmp_pac.absdir)
2977 def server_diff(apiurl,
2978 old_project, old_package, old_revision,
2979 new_project, new_package, new_revision, unified=False):
2981 query = {'cmd': 'diff', 'expand': '1'}
2983 query['oproject'] = old_project
2985 query['opackage'] = old_package
2987 query['orev'] = old_revision
2989 query['rev'] = new_revision
2991 query['unified'] = 1
2993 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
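# server_diff() only composes the query here; the request on the resulting URL
# is elided in this excerpt. For illustration, the URL ends up roughly like:
#
#   <apiurl>/source/<new_project>/<new_package>?cmd=diff&expand=1
#       &oproject=...&opackage=...&orev=...&rev=...&unified=1
#
# where the optional parameters are only added for arguments that were passed,
# as the if-blocks above show.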
2999 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
3001 creates the plain directory structure for a package dir.
3002 The 'apiurl' parameter is needed for the project dir initialization.
3003 The 'project' and 'package' parameters specify the name of the
3004 project and the package. The optional 'pathname' parameter is used
3005 for printing out the message that a new dir was created (default: 'prj_dir/package').
3006 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
3008 prj_dir = prj_dir or project
3010 # FIXME: carefully test each path component of prj_dir,
3011 # if we have a .osc/_files entry at that level.
3012 # -> if so, we have a package/project clash,
3013 # and should rename this path component by appending '.proj'
3014 # and give user a warning message, to discourage such clashes
3016 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3017 if is_package_dir(prj_dir):
3018 # we want this to become a project directory,
3019 # but it already is a package directory.
3020 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3022 if not is_project_dir(prj_dir):
3023 # this directory could exist as a parent directory for one of our earlier
3024 # checked out sub-projects. in this case, we still need to initialize it.
3025 print statfrmt('A', prj_dir)
3026 init_project_dir(apiurl, prj_dir, project)
3028 if is_project_dir(os.path.join(prj_dir, package)):
3029 # the thing exists, but is a project directory and not a package directory
3030 # FIXME: this should be a warning message to discourage package/project clashes
3031 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3033 if not os.path.exists(os.path.join(prj_dir, package)):
3034 print statfrmt('A', pathname)
3035 os.mkdir(os.path.join(prj_dir, package))
3036 os.mkdir(os.path.join(prj_dir, package, store))
3038 return(os.path.join(prj_dir, package))
3041 def checkout_package(apiurl, project, package,
3042 revision=None, pathname=None, prj_obj=None,
3043 expand_link=False, prj_dir=None, service_files=None, progress_obj=None):
3045 # the project we're in might be deleted.
3046 # that'll throw an error then.
3047 olddir = os.getcwd()
3049 olddir = os.environ.get("PWD")
3054 if sys.platform[:3] == 'win':
3055 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3057 if conf.config['checkout_no_colon']:
3058 prj_dir = prj_dir.replace(':', '/')
3061 pathname = getTransActPath(os.path.join(prj_dir, package))
3063 # before we create directories and stuff, check if the package actually
3065 show_package_meta(apiurl, project, package)
3069 # try to read from the linkinfo
3070 # if it is a link we use the xsrcmd5 as the revision to be
3073 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3075 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3080 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3081 init_package_dir(apiurl, project, package, store, revision)
3083 p = Package(package, progress_obj=progress_obj)
3086 for filename in p.filenamelist:
3087 if service_files or not filename.startswith('_service:'):
3088 p.updatefile(filename, revision)
3089 # print 'A ', os.path.join(project, package, filename)
3090 print statfrmt('A', os.path.join(pathname, filename))
3091 if conf.config['do_package_tracking']:
3092 # check if we can re-use an existing project object
3094 prj_obj = Project(os.getcwd())
3095 prj_obj.set_state(p.name, ' ')
3096 prj_obj.write_packages()
3100 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3101 dst_userid = None, keep_develproject = False):
3103 update pkgmeta with new_name and new_prj and set the calling user as the
3104 only maintainer (unless keep_maintainers is set). Additionally remove the
3105 develproject entry (<devel />) unless keep_develproject is true.
3107 root = ET.fromstring(''.join(pkgmeta))
3108 root.set('name', new_name)
3109 root.set('project', new_prj)
3110 if not keep_maintainers:
3111 for person in root.findall('person'):
3113 if not keep_develproject:
3114 for dp in root.findall('devel'):
3116 return ET.tostring(root)
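# Illustration (hypothetical input): given a package meta such as
#   <package name="hello" project="devel:tools"> ... </package>
# replace_pkg_meta(meta, 'hello', 'home:user') returns the same document with
# name="hello" and project="home:user", the existing <person> entries replaced
# (unless keep_maintainers is set) and the <devel/> element dropped (unless
# keep_develproject is set).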
3118 def link_to_branch(apiurl, project, package):
3120 convert a package with a _link + project.diff to a branch
3123 if '_link' in meta_get_filelist(apiurl, project, package):
3124 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3127 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3129 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3131 create a linked package
3132 - "src" is the original package
3133 - "dst" is the "link" package that we are creating here
3138 dst_meta = meta_exists(metatype='pkg',
3139 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3141 create_new=False, apiurl=conf.config['apiurl'])
3143 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3144 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3149 root = ET.fromstring(''.join(dst_meta))
3150 elm = root.find('publish')
3152 elm = ET.SubElement(root, 'publish')
3154 ET.SubElement(elm, 'disable')
3155 dst_meta = ET.tostring(root)
3158 path_args=(dst_project, dst_package),
3160 # create the _link file
3161 # but first, make sure not to overwrite an existing one
3162 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3164 print >>sys.stderr, 'forced overwrite of existing _link file'
3167 print >>sys.stderr, '_link file already exists...! Aborting'
3171 rev = 'rev="%s"' % rev
3176 cicount = 'cicount="%s"' % cicount
3180 print 'Creating _link...',
3181 link_template = """\
3182 <link project="%s" package="%s" %s %s>
3184 <!-- <apply name="patch" /> apply a patch on the source directory -->
3185 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3186 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3187 <!-- <delete>filename</delete> delete a file -->
3190 """ % (src_project, src_package, rev, cicount)
3192 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3193 http_PUT(u, data=link_template)
3196 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3199 - "src" is the original package
3200 - "dst" is the "aggregate" package that we are creating here
3201 - "map" is a dictionary SRC => TARGET repository mappings
3206 dst_meta = meta_exists(metatype='pkg',
3207 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3209 create_new=False, apiurl=conf.config['apiurl'])
3211 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3212 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3217 root = ET.fromstring(''.join(dst_meta))
3218 elm = root.find('publish')
3220 elm = ET.SubElement(root, 'publish')
3222 ET.SubElement(elm, 'disable')
3223 dst_meta = ET.tostring(root)
3226 path_args=(dst_project, dst_package),
3229 # create the _aggregate file
3230 # but first, make sure not to overwrite an existing one
3231 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3233 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3236 print 'Creating _aggregate...',
3237 aggregate_template = """\
3239 <aggregate project="%s">
3241 for tgt, src in repo_map.iteritems():
3242 aggregate_template += """\
3243 <repository target="%s" source="%s" />
3246 aggregate_template += """\
3247 <package>%s</package>
3250 """ % ( src_package)
3252 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3253 http_PUT(u, data=aggregate_template)
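# For illustration, with src_project='devel:tools', src_package='hello' and a
# single repo_map entry, the uploaded _aggregate body contains roughly
# (values hypothetical, surrounding wrapper element elided in this excerpt):
#
#   <aggregate project="devel:tools">
#     <repository target="openSUSE_11.2" source="standard" />
#     <package>hello</package>
#   </aggregate>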
3257 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3259 Branch packages defined via attributes (via API call)
3261 query = { 'cmd': 'branch' }
3262 query['attribute'] = attribute
3264 query['target_project'] = targetproject
3266 query['package'] = package
3267 if maintained_update_project_attribute:
3268 query['update_project_attribute'] = maintained_update_project_attribute
3270 u = makeurl(apiurl, ['source'], query=query)
3274 except urllib2.HTTPError, e:
3275 msg = ''.join(e.readlines())
3276 msg = msg.split('<summary>')[1]
3277 msg = msg.split('</summary>')[0]
3278 m = re.match(r"attribute branch call failed: (\S+)/", msg)
3282 r = r.split('targetproject">')[1]
3283 r = r.split('</data>')[0]
3287 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False, msg=''):
3289 Branch a package (via API call)
3291 query = { 'cmd': 'branch' }
3293 query['ignoredevel'] = '1'
3297 query['target_project'] = target_project
3299 query['target_package'] = target_package
3301 query['comment'] = msg
3302 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
3305 except urllib2.HTTPError, e:
3306 if not return_existing:
3308 msg = ''.join(e.readlines())
3309 msg = msg.split('<summary>')[1]
3310 msg = msg.split('</summary>')[0]
3311 m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
3315 return (True, m.group(1), m.group(2), None, None)
3318 for i in ET.fromstring(f.read()).findall('data'):
3319 data[i.get('name')] = i.text
3320 return (False, data.get('targetproject', None), data.get('targetpackage', None),
3321 data.get('sourceproject', None), data.get('sourcepackage', None))
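# branch_pkg() returns a 5-tuple; usage sketch with hypothetical names:
#
#   exists, tprj, tpkg, sprj, spkg = branch_pkg(apiurl, 'openSUSE:Factory',
#                                               'hello', return_existing=True)
#   # exists is True if the branch target already existed; tprj/tpkg name the
#   # (possibly pre-existing) branch, e.g.
#   # 'home:user:branches:openSUSE:Factory' / 'hello'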
3324 def copy_pac(src_apiurl, src_project, src_package,
3325 dst_apiurl, dst_project, dst_package,
3326 client_side_copy = False,
3327 keep_maintainers = False,
3328 keep_develproject = False,
3333 Create a copy of a package.
3335 Copying can be done by downloading the files from one package and committing
3336 them into the other by uploading them (client-side copy) --
3337 or by the server, in a single API call.
3340 src_meta = show_package_meta(src_apiurl, src_project, src_package)
3341 dst_userid = conf.get_apiurl_usr(dst_apiurl)
3342 src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
3343 dst_userid, keep_develproject)
3345 print 'Sending meta data...'
3346 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
3347 http_PUT(u, data=src_meta)
3349 print 'Copying files...'
3350 if not client_side_copy:
3351 query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
3353 query['expand'] = '1'
3355 query['orev'] = revision
3357 query['comment'] = comment
3358 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
3363 # copy one file after the other
3365 tmpdir = tempfile.mkdtemp(prefix='osc_copypac')
3367 query = {'rev': 'upload'}
3368 for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand):
3370 get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n, revision=revision)
3371 u = makeurl(dst_apiurl, ['source', dst_project, dst_