1 # Copyright (C) 2006 Novell Inc. All rights reserved.
2 # This program is free software; it may be used, copied, modified
3 # and distributed under the terms of the GNU General Public Licence,
4 # either version 2, or version 3 (at your option).
6 __version__ = '0.125git'
8 # __store_version__ is to be incremented when the format of the working copy
9 # "store" changes in an incompatible way. Please add any needed migration
10 # functionality to check_store_version().
11 __store_version__ = '1.0'
17 from urllib import pathname2url, quote_plus, urlencode, unquote
18 from urlparse import urlsplit, urlunsplit
19 from cStringIO import StringIO
27 from xml.etree import cElementTree as ET
29 import cElementTree as ET
33 DISTURL_RE = re.compile(r"^(?P<bs>.*)://(?P<apiurl>.*?)/(?P<project>.*?)/(?P<repository>.*?)/(?P<revision>.*)-(?P<source>.*)$")
34 BUILDLOGURL_RE = re.compile(r"^(?P<apiurl>https?://.*?)/build/(?P<project>.*?)/(?P<repository>.*?)/(?P<arch>.*?)/(?P<package>.*?)/_log$")
38 # NOTE: do not use this anymore, use conf.exclude_glob instead.
39 # but this needs to stay to avoid breakage of tools which use osc lib
40 exclude_stuff = [store, 'CVS', '*~', '#*#', '.*', '_linkerror']
42 new_project_templ = """\
43 <project name="%(name)s">
45 <title></title> <!-- Short title of NewProject -->
47 <!-- This is for a longer description of the purpose of the project -->
50 <person role="maintainer" userid="%(user)s" />
51 <person role="bugowner" userid="%(user)s" />
52 <!-- remove this block to publish your packages on the mirrors -->
63 <!-- remove this comment to enable one or more build targets
65 <repository name="openSUSE_Factory">
66 <path project="openSUSE:Factory" repository="standard" />
70 <repository name="openSUSE_11.2">
71 <path project="openSUSE:11.2" repository="standard"/>
75 <repository name="openSUSE_11.1">
76 <path project="openSUSE:11.1" repository="standard"/>
80 <repository name="openSUSE_11.0">
81 <path project="openSUSE:11.0" repository="standard"/>
85 <repository name="Fedora_11">
86 <path project="Fedora:11" repository="standard" />
90 <repository name="SLE_11">
91 <path project="SUSE:SLE-11" repository="standard" />
95 <repository name="SLE_10">
96 <path project="SUSE:SLE-10:SDK" repository="standard" />
105 new_package_templ = """\
106 <package name="%(name)s">
108 <title></title> <!-- Title of package -->
111 <!-- for long description -->
114 <person role="maintainer" userid="%(user)s"/>
115 <person role="bugowner" userid="%(user)s"/>
117 <url>PUT_UPSTREAM_URL_HERE</url>
121 use one of the examples below to disable building of this package
122 on a certain architecture, in a certain repository,
123 or a combination thereof:
125 <disable arch="x86_64"/>
126 <disable repository="SUSE_SLE-10"/>
127 <disable repository="SUSE_SLE-10" arch="x86_64"/>
129 Possible sections where you can use the tags above:
139 Please have a look at:
140 http://en.opensuse.org/Restricted_Formats
141 Packages containing formats listed there are NOT allowed to
142 be packaged in the openSUSE Buildservice and will be deleted!
149 new_attribute_templ = """\
151 <attribute namespace="" name="">
157 new_user_template = """\
159 <login>%(user)s</login>
160 <email>PUT_EMAIL_ADDRESS_HERE</email>
161 <realname>PUT_REAL_NAME_HERE</realname>
163 <project name="home:%(user)s"/>
179 new_pattern_template = """\
180 <!-- See http://svn.opensuse.org/svn/zypp/trunk/libzypp/zypp/parser/yum/schema/patterns.rng -->
186 buildstatus_symbols = {'succeeded': '.',
188 'expansion error': 'E',
199 # os.path.samefile is available only under Unix
200 def os_path_samefile(path1, path2):
202 return os.path.samefile(path1, path2)
204 return os.path.realpath(path1) == os.path.realpath(path2)
207 """represent a file, including its metadata"""
208 def __init__(self, name, md5, size, mtime):
218 """Source service content
221 """creates an empty serviceinfo instance"""
224 def read(self, serviceinfo_node):
225 """read in the source services <services> element passed as
228 if serviceinfo_node is None:
231 services = serviceinfo_node.findall('service')
233 for service in services:
234 name = service.get('name')
236 for param in service.findall('param'):
237 option = param.get('name', None)
239 name += " --" + option + " '" + value + "'"
240 self.commands.append(name)
242 msg = 'invalid service format:\n%s' % ET.tostring(serviceinfo_node)
243 raise oscerr.APIError(msg)
245 def execute(self, dir):
248 for call in self.commands:
249 temp_dir = tempfile.mkdtemp()
250 name = call.split(None, 1)[0]
251 if not os.path.exists("/usr/lib/obs/service/"+name):
252 msg = "ERROR: service is not installed !"
253 msg += "Can maybe solved with: zypper in obs-server-" + name
254 raise oscerr.APIError(msg)
255 c = "/usr/lib/obs/service/" + call + " --outdir " + temp_dir
256 ret = subprocess.call(c, shell=True)
258 print "ERROR: service call failed: " + c
260 for file in os.listdir(temp_dir):
261 os.rename( os.path.join(temp_dir, file), os.path.join(dir, "_service:"+name+":"+file) )
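# A minimal sketch (not from the original source) of how read() and execute()
# fit together, assuming a hypothetical _service file such as:
#
#   <services>
#     <service name="download_url">
#       <param name="host">example.org</param>
#       <param name="path">/pub/foo.tar.gz</param>
#     </service>
#   </services>
#
# read() collects the command string
#   download_url --host 'example.org' --path '/pub/foo.tar.gz'
# and execute() runs /usr/lib/obs/service/download_url with those arguments
# plus "--outdir <tempdir>", then renames the generated files into the
# package directory as _service:download_url:<filename>.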
265 """linkinfo metadata (which is part of the xml representing a directory
268 """creates an empty linkinfo instance"""
278 def read(self, linkinfo_node):
279 """read in the linkinfo metadata from the <linkinfo> element passed as
281 If the passed element is None, the method does nothing.
283 if linkinfo_node is None:
285 self.project = linkinfo_node.get('project')
286 self.package = linkinfo_node.get('package')
287 self.xsrcmd5 = linkinfo_node.get('xsrcmd5')
288 self.lsrcmd5 = linkinfo_node.get('lsrcmd5')
289 self.srcmd5 = linkinfo_node.get('srcmd5')
290 self.error = linkinfo_node.get('error')
291 self.rev = linkinfo_node.get('rev')
292 self.baserev = linkinfo_node.get('baserev')
295 """returns True if the linkinfo is not empty, otherwise False"""
296 if self.xsrcmd5 or self.lsrcmd5:
300 def isexpanded(self):
301 """returns True if the package is an expanded link"""
302 if self.lsrcmd5 and not self.xsrcmd5:
307 """returns True if the link is in error state (could not be applied)"""
313 """return an informatory string representation"""
314 if self.islink() and not self.isexpanded():
315 return 'project %s, package %s, xsrcmd5 %s, rev %s' \
316 % (self.project, self.package, self.xsrcmd5, self.rev)
317 elif self.islink() and self.isexpanded():
319 return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' \
320 % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error)
322 return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' \
323 % (self.project, self.package, self.srcmd5, self.lsrcmd5)
329 """represent a project directory, holding packages"""
330 def __init__(self, dir, getPackageList=True, progress_obj=None):
333 self.absdir = os.path.abspath(dir)
334 self.progress_obj = progress_obj
336 self.name = store_read_project(self.dir)
337 self.apiurl = store_read_apiurl(self.dir)
340 self.pacs_available = meta_get_packagelist(self.apiurl, self.name)
342 self.pacs_available = []
344 if conf.config['do_package_tracking']:
345 self.pac_root = self.read_packages().getroot()
346 self.pacs_have = [ pac.get('name') for pac in self.pac_root.findall('package') ]
347 self.pacs_excluded = [ i for i in os.listdir(self.dir)
348 for j in conf.config['exclude_glob']
349 if fnmatch.fnmatch(i, j) ]
350 self.pacs_unvers = [ i for i in os.listdir(self.dir) if i not in self.pacs_have and i not in self.pacs_excluded ]
351 # store all broken packages (e.g. packages which were removed by a non-osc cmd)
352 # in the self.pacs_broken list
353 self.pacs_broken = []
354 for p in self.pacs_have:
355 if not os.path.isdir(os.path.join(self.absdir, p)):
356 # all states will be replaced with the '!'-state
357 # (unless it is already marked as deleted ('D'-state))
358 self.pacs_broken.append(p)
360 self.pacs_have = [ i for i in os.listdir(self.dir) if i in self.pacs_available ]
362 self.pacs_missing = [ i for i in self.pacs_available if i not in self.pacs_have ]
364 def checkout_missing_pacs(self, expand_link=False):
365 for pac in self.pacs_missing:
367 if conf.config['do_package_tracking'] and pac in self.pacs_unvers:
368 # pac is not under version control but a local file/dir exists
369 msg = 'can\'t add package \'%s\': Object already exists' % pac
370 raise oscerr.PackageExists(self.name, pac, msg)
372 print 'checking out new package %s' % pac
373 checkout_package(self.apiurl, self.name, pac, \
374 pathname=getTransActPath(os.path.join(self.dir, pac)), \
375 prj_obj=self, prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
377 def set_state(self, pac, state):
378 node = self.get_package_node(pac)
380 self.new_package_entry(pac, state)
382 node.attrib['state'] = state
384 def get_package_node(self, pac):
385 for node in self.pac_root.findall('package'):
386 if pac == node.get('name'):
390 def del_package_node(self, pac):
391 for node in self.pac_root.findall('package'):
392 if pac == node.get('name'):
393 self.pac_root.remove(node)
395 def get_state(self, pac):
396 node = self.get_package_node(pac)
398 return node.get('state')
402 def new_package_entry(self, name, state):
403 ET.SubElement(self.pac_root, 'package', name=name, state=state)
405 def read_packages(self):
406 packages_file = os.path.join(self.absdir, store, '_packages')
407 if os.path.isfile(packages_file) and os.path.getsize(packages_file):
408 return ET.parse(packages_file)
410 # scan project for existing packages and migrate them
412 for data in os.listdir(self.dir):
413 pac_dir = os.path.join(self.absdir, data)
414 # we cannot use self.pacs_available because we cannot guarantee that the package list
415 # was fetched from the server
416 if data in meta_get_packagelist(self.apiurl, self.name) and is_package_dir(pac_dir) \
417 and Package(pac_dir).name == data:
418 cur_pacs.append(ET.Element('package', name=data, state=' '))
419 store_write_initial_packages(self.absdir, self.name, cur_pacs)
420 return ET.parse(os.path.join(self.absdir, store, '_packages'))
422 def write_packages(self):
423 # TODO: should we only modify the existing file instead of overwriting?
424 ET.ElementTree(self.pac_root).write(os.path.join(self.absdir, store, '_packages'))
426 def addPackage(self, pac):
428 for i in conf.config['exclude_glob']:
429 if fnmatch.fnmatch(pac, i):
430 msg = 'invalid package name: \'%s\' (see \'exclude_glob\' config option)' % pac
431 raise oscerr.OscIOError(None, msg)
432 state = self.get_state(pac)
433 if state is None or state == 'D':
434 self.new_package_entry(pac, 'A')
435 self.write_packages()
436 # sometimes the new pac doesn't exist in the list because
437 # it would take too much time to update all data structs regularly
438 if pac in self.pacs_unvers:
439 self.pacs_unvers.remove(pac)
441 raise oscerr.PackageExists(self.name, pac, 'package \'%s\' is already under version control' % pac)
443 def delPackage(self, pac, force = False):
444 state = self.get_state(pac.name)
446 if state == ' ' or state == 'D':
448 for file in pac.filenamelist + pac.filenamelist_unvers:
449 filestate = pac.status(file)
450 if filestate == 'M' or filestate == 'C' or \
451 filestate == 'A' or filestate == '?':
454 del_files.append(file)
455 if can_delete or force:
456 for file in del_files:
457 pac.delete_localfile(file)
458 if pac.status(file) != '?':
459 pac.delete_storefile(file)
460 # this is not really necessary
461 pac.put_on_deletelist(file)
462 print statfrmt('D', getTransActPath(os.path.join(pac.dir, file)))
463 print statfrmt('D', getTransActPath(os.path.join(pac.dir, os.pardir, pac.name)))
464 pac.write_deletelist()
465 self.set_state(pac.name, 'D')
466 self.write_packages()
468 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
471 delete_dir(pac.absdir)
472 self.del_package_node(pac.name)
473 self.write_packages()
474 print statfrmt('D', pac.name)
476 print 'package \'%s\' has local modifications (see osc st for details)' % pac.name
478 print 'package is not under version control'
480 print 'unsupported state'
482 def update(self, pacs = (), expand_link=False, unexpand_link=False, service_files=False):
485 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
487 # we need to make sure that the _packages file will be written (even if an exception
490 # update complete project
491 # packages which no longer exist upstream
492 upstream_del = [ pac for pac in self.pacs_have if not pac in self.pacs_available and self.get_state(pac) != 'A']
494 for pac in upstream_del:
495 p = Package(os.path.join(self.dir, pac))
496 self.delPackage(p, force = True)
497 delete_storedir(p.storedir)
502 self.pac_root.remove(self.get_package_node(p.name))
503 self.pacs_have.remove(pac)
505 for pac in self.pacs_have:
506 state = self.get_state(pac)
507 if pac in self.pacs_broken:
508 if self.get_state(pac) != 'A':
509 checkout_package(self.apiurl, self.name, pac,
510 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
511 prj_dir=self.dir, expand_link=not unexpand_link, progress_obj=self.progress_obj)
514 p = Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj)
516 if expand_link and p.islink() and not p.isexpanded():
519 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev)
521 rev = show_upstream_xsrcmd5(p.apiurl, p.prjname, p.name, revision=p.rev, linkrev="base")
524 rev = p.linkinfo.xsrcmd5
525 print 'Expanding to rev', rev
526 elif unexpand_link and p.islink() and p.isexpanded():
527 rev = p.linkinfo.lsrcmd5
528 print 'Unexpanding to rev', rev
529 elif p.islink() and p.isexpanded():
531 print 'Updating %s' % p.name
532 p.update(rev, service_files)
536 # TODO: Package::update has to be fixed to behave like svn does
537 if pac in self.pacs_broken:
538 checkout_package(self.apiurl, self.name, pac,
539 pathname=getTransActPath(os.path.join(self.dir, pac)), prj_obj=self, \
540 prj_dir=self.dir, expand_link=expand_link, progress_obj=self.progress_obj)
542 Package(os.path.join(self.dir, pac), progress_obj=self.progress_obj).update()
543 elif state == 'A' and pac in self.pacs_available:
544 # file/dir called pac already exists and is under version control
545 msg = 'can\'t add package \'%s\': Object already exists' % pac
546 raise oscerr.PackageExists(self.name, pac, msg)
551 print 'unexpected state.. package \'%s\'' % pac
553 self.checkout_missing_pacs(expand_link=not unexpand_link)
555 self.write_packages()
557 def commit(self, pacs = (), msg = '', files = {}):
562 if pac in files:
564 state = self.get_state(pac)
566 self.commitNewPackage(pac, msg, todo)
568 self.commitDelPackage(pac)
570 # display the correct dir when sending the changes
571 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
574 p = Package(os.path.join(self.dir, pac))
577 elif pac in self.pacs_unvers and not is_package_dir(os.path.join(self.dir, pac)):
578 print 'osc: \'%s\' is not under version control' % pac
579 elif pac in self.pacs_broken:
580 print 'osc: \'%s\' package not found' % pac
582 self.commitExtPackage(pac, msg, todo)
584 self.write_packages()
586 # if we have packages marked as '!' we cannot commit
587 for pac in self.pacs_broken:
588 if self.get_state(pac) != 'D':
589 msg = 'commit failed: package \'%s\' is missing' % pac
590 raise oscerr.PackageMissing(self.name, pac, msg)
592 for pac in self.pacs_have:
593 state = self.get_state(pac)
596 Package(os.path.join(self.dir, pac)).commit(msg)
598 self.commitDelPackage(pac)
600 self.commitNewPackage(pac, msg)
602 self.write_packages()
604 def commitNewPackage(self, pac, msg = '', files = []):
605 """creates and commits a new package if it does not exist on the server"""
606 if pac in self.pacs_available:
607 print 'package \'%s\' already exists' % pac
609 user = conf.get_apiurl_usr(self.apiurl)
610 edit_meta(metatype='pkg',
611 path_args=(quote_plus(self.name), quote_plus(pac)),
616 # display the correct dir when sending the changes
618 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
622 p = Package(os.path.join(self.dir, pac))
624 print statfrmt('Sending', os.path.normpath(p.dir))
626 self.set_state(pac, ' ')
629 def commitDelPackage(self, pac):
630 """deletes a package on the server and in the working copy"""
632 # display the correct dir when sending the changes
633 if os_path_samefile(os.path.join(self.dir, pac), os.curdir):
636 pac_dir = os.path.join(self.dir, pac)
637 p = Package(os.path.join(self.dir, pac))
638 #print statfrmt('Deleting', os.path.normpath(os.path.join(p.dir, os.pardir, pac)))
639 delete_storedir(p.storedir)
645 pac_dir = os.path.join(self.dir, pac)
646 #print statfrmt('Deleting', getTransActPath(os.path.join(self.dir, pac)))
647 print statfrmt('Deleting', getTransActPath(pac_dir))
648 delete_package(self.apiurl, self.name, pac)
649 self.del_package_node(pac)
651 def commitExtPackage(self, pac, msg, files = []):
652 """commits a package from an external project"""
653 if os_path_samefile(os.path.join(self.dir, pac), os.getcwd()):
656 pac_path = os.path.join(self.dir, pac)
658 project = store_read_project(pac_path)
659 package = store_read_package(pac_path)
660 apiurl = store_read_apiurl(pac_path)
661 if meta_exists(metatype='pkg',
662 path_args=(quote_plus(project), quote_plus(package)),
664 create_new=False, apiurl=apiurl):
665 p = Package(pac_path)
669 user = conf.get_apiurl_usr(self.apiurl)
670 edit_meta(metatype='pkg',
671 path_args=(quote_plus(project), quote_plus(package)),
676 p = Package(pac_path)
682 r.append('*****************************************************')
683 r.append('Project %s (dir=%s, absdir=%s)' % (self.name, self.dir, self.absdir))
684 r.append('have pacs:\n%s' % ', '.join(self.pacs_have))
685 r.append('missing pacs:\n%s' % ', '.join(self.pacs_missing))
686 r.append('*****************************************************')
692 """represent a package (its directory) and read/keep/write its metadata"""
693 def __init__(self, workingdir, progress_obj=None):
694 self.dir = workingdir
695 self.absdir = os.path.abspath(self.dir)
696 self.storedir = os.path.join(self.absdir, store)
697 self.progress_obj = progress_obj
699 check_store_version(self.dir)
701 self.prjname = store_read_project(self.dir)
702 self.name = store_read_package(self.dir)
703 self.apiurl = store_read_apiurl(self.dir)
705 self.update_datastructs()
709 self.todo_delete = []
712 source_url = makeurl(self.apiurl, ['source', self.prjname, self.name])
713 r = info_templ % (self.prjname, self.name, self.absdir, self.apiurl, source_url, self.srcmd5, self.rev, self.linkinfo)
716 def addfile(self, n):
717 st = os.stat(os.path.join(self.dir, n))
718 f = File(n, None, st.st_size, st.st_mtime)
719 self.filelist.append(f)
720 self.filenamelist.append(n)
721 self.filenamelist_unvers.remove(n)
722 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
724 def delete_file(self, n, force=False):
725 """deletes a file if possible and marks the file as deleted"""
726 state = self.status(n)
727 if state in ['?', 'A', 'M'] and not force:
728 return (False, state)
729 self.delete_localfile(n)
731 self.put_on_deletelist(n)
732 self.write_deletelist()
734 self.delete_storefile(n)
737 def delete_storefile(self, n):
738 try: os.unlink(os.path.join(self.storedir, n))
741 def delete_localfile(self, n):
742 try: os.unlink(os.path.join(self.dir, n))
745 def put_on_deletelist(self, n):
746 if n not in self.to_be_deleted:
747 self.to_be_deleted.append(n)
749 def put_on_conflictlist(self, n):
750 if n not in self.in_conflict:
751 self.in_conflict.append(n)
753 def clear_from_conflictlist(self, n):
754 """delete an entry from the file, and remove the file if it would be empty"""
755 if n in self.in_conflict:
757 filename = os.path.join(self.dir, n)
758 storefilename = os.path.join(self.storedir, n)
759 myfilename = os.path.join(self.dir, n + '.mine')
760 if self.islinkrepair() or self.ispulled():
761 upfilename = os.path.join(self.dir, n + '.new')
763 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
766 os.unlink(myfilename)
767 # the working copy may be updated, so the .r* ending may be obsolete...
769 os.unlink(upfilename)
770 if self.islinkrepair() or self.ispulled():
771 os.unlink(os.path.join(self.dir, n + '.old'))
775 self.in_conflict.remove(n)
777 self.write_conflictlist()
779 def write_deletelist(self):
780 if len(self.to_be_deleted) == 0:
782 os.unlink(os.path.join(self.storedir, '_to_be_deleted'))
786 fname = os.path.join(self.storedir, '_to_be_deleted')
788 f.write('\n'.join(self.to_be_deleted))
792 def delete_source_file(self, n):
793 """delete local a source file"""
794 self.delete_localfile(n)
795 self.delete_storefile(n)
797 def delete_remote_source_file(self, n):
798 """delete a remote source file (e.g. from the server)"""
800 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
803 def put_source_file(self, n):
805 # escaping '+' in the URL path (note: not in the URL query string) is
806 # only a workaround for ruby on rails, which swallows it otherwise
808 u = makeurl(self.apiurl, ['source', self.prjname, self.name, pathname2url(n)], query=query)
809 http_PUT(u, file = os.path.join(self.dir, n))
811 shutil.copyfile(os.path.join(self.dir, n), os.path.join(self.storedir, n))
813 def commit(self, msg=''):
814 # commit only if the upstream revision is the same as the working copy's
815 upstream_rev = self.latest_rev()
816 if self.rev != upstream_rev:
817 raise oscerr.WorkingCopyOutdated((self.absdir, self.rev, upstream_rev))
820 self.todo = self.filenamelist_unvers + self.filenamelist
822 pathn = getTransActPath(self.dir)
824 have_conflicts = False
825 for filename in self.todo:
826 if not filename.startswith('_service:') and not filename.startswith('_service_'):
827 st = self.status(filename)
828 if st == 'A' or st == 'M':
829 self.todo_send.append(filename)
830 print statfrmt('Sending', os.path.join(pathn, filename))
832 self.todo_delete.append(filename)
833 print statfrmt('Deleting', os.path.join(pathn, filename))
835 have_conflicts = True
838 print 'Please resolve all conflicts before committing using "osc resolved FILE"!'
841 if not self.todo_send and not self.todo_delete and not self.rev == "upload" and not self.islinkrepair() and not self.ispulled():
842 print 'nothing to do for package %s' % self.name
845 if self.islink() and self.isexpanded():
846 # resolve the link into the upload revision
847 # XXX: do this always?
848 query = { 'cmd': 'copy', 'rev': 'upload', 'orev': self.rev }
849 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
852 print 'Transmitting file data ',
854 for filename in self.todo_delete:
855 # do not touch local files on commit --
856 # delete remotely instead
857 self.delete_remote_source_file(filename)
858 self.to_be_deleted.remove(filename)
859 for filename in self.todo_send:
860 sys.stdout.write('.')
862 self.put_source_file(filename)
864 # all source files are committed - now comes the log
865 query = { 'cmd' : 'commit',
867 'user' : conf.get_apiurl_usr(self.apiurl),
869 if self.islink() and self.isexpanded():
870 query['keeplink'] = '1'
871 if conf.config['linkcontrol'] or self.isfrozen():
872 query['linkrev'] = self.linkinfo.srcmd5
874 query['repairlink'] = '1'
875 query['linkrev'] = self.get_pulled_srcmd5()
876 if self.islinkrepair():
877 query['repairlink'] = '1'
878 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
880 except urllib2.HTTPError, e:
881 # delete upload revision
883 query = { 'cmd': 'deleteuploadrev' }
884 u = makeurl(self.apiurl, ['source', self.prjname, self.name], query=query)
890 root = ET.parse(f).getroot()
891 self.rev = int(root.get('rev'))
893 print 'Committed revision %s.' % self.rev
896 os.unlink(os.path.join(self.storedir, '_pulled'))
897 if self.islinkrepair():
898 os.unlink(os.path.join(self.storedir, '_linkrepair'))
899 self.linkrepair = False
900 # XXX: mark package as invalid?
901 print 'The source link has been repaired. This directory can now be removed.'
902 if self.islink() and self.isexpanded():
903 self.update_local_filesmeta(revision=self.latest_rev())
905 self.update_local_filesmeta()
906 self.write_deletelist()
907 self.update_datastructs()
909 if self.filenamelist.count('_service'):
910 print 'The package contains a source service.'
911 for filename in self.todo:
912 if filename.startswith('_service:') and os.path.exists(filename):
913 os.unlink(filename) # remove local files
914 print_request_list(self.apiurl, self.prjname, self.name)
916 def write_conflictlist(self):
917 if len(self.in_conflict) == 0:
919 os.unlink(os.path.join(self.storedir, '_in_conflict'))
923 fname = os.path.join(self.storedir, '_in_conflict')
925 f.write('\n'.join(self.in_conflict))
929 def updatefile(self, n, revision):
930 filename = os.path.join(self.dir, n)
931 storefilename = os.path.join(self.storedir, n)
932 mtime = self.findfilebyname(n).mtime
934 get_source_file(self.apiurl, self.prjname, self.name, n, targetfilename=filename, revision=revision, progress_obj=self.progress_obj)
935 os.utime(filename, (-1, mtime))
937 shutil.copyfile(filename, storefilename)
939 def mergefile(self, n):
940 filename = os.path.join(self.dir, n)
941 storefilename = os.path.join(self.storedir, n)
942 myfilename = os.path.join(self.dir, n + '.mine')
943 upfilename = os.path.join(self.dir, n + '.r' + self.rev)
944 os.rename(filename, myfilename)
946 mtime = self.findfilebyname(n).mtime
947 get_source_file(self.apiurl, self.prjname, self.name, n,
948 revision=self.rev, targetfilename=upfilename, progress_obj=self.progress_obj)
949 os.utime(upfilename, (-1, mtime))
951 if binary_file(myfilename) or binary_file(upfilename):
953 shutil.copyfile(upfilename, filename)
954 shutil.copyfile(upfilename, storefilename)
955 self.in_conflict.append(n)
956 self.write_conflictlist()
960 # diff3 OPTIONS... MINE OLDER YOURS
961 merge_cmd = 'diff3 -m -E %s %s %s > %s' % (myfilename, storefilename, upfilename, filename)
962 # we would rather use the subprocess module, but it is not available before 2.4
963 ret = subprocess.call(merge_cmd, shell=True)
965 # "An exit status of 0 means `diff3' was successful, 1 means some
966 # conflicts were found, and 2 means trouble."
968 # merge was successful... clean up
969 shutil.copyfile(upfilename, storefilename)
970 os.unlink(upfilename)
971 os.unlink(myfilename)
975 shutil.copyfile(upfilename, storefilename)
976 self.in_conflict.append(n)
977 self.write_conflictlist()
980 print >>sys.stderr, '\ndiff3 got in trouble... exit code:', ret
981 print >>sys.stderr, 'the command line was:'
982 print >>sys.stderr, merge_cmd
987 def update_local_filesmeta(self, revision=None):
989 Update the local _files file in the store.
990 It is replaced with the version pulled from upstream.
992 meta = ''.join(show_files_meta(self.apiurl, self.prjname, self.name, revision=revision))
994 f = open(os.path.join(self.storedir, '_files.new'), 'w')
997 os.rename(os.path.join(self.storedir, '_files.new'), os.path.join(self.storedir, '_files'))
999 if os.path.exists(os.path.join(self.storedir, '_files.new')):
1000 os.unlink(os.path.join(self.storedir, '_files.new'))
1003 def update_datastructs(self):
1005 Update the internal data structures if the local _files
1006 file has changed (e.g. update_local_filesmeta() has been
1010 files_tree = read_filemeta(self.dir)
1011 files_tree_root = files_tree.getroot()
1013 self.rev = files_tree_root.get('rev')
1014 self.srcmd5 = files_tree_root.get('srcmd5')
1016 self.linkinfo = Linkinfo()
1017 self.linkinfo.read(files_tree_root.find('linkinfo'))
1019 self.filenamelist = []
1021 for node in files_tree_root.findall('entry'):
1023 f = File(node.get('name'),
1025 int(node.get('size')),
1026 int(node.get('mtime')))
1028 # okay, a very old version of _files, which didn't contain any metadata yet...
1029 f = File(node.get('name'), '', 0, 0)
1030 self.filelist.append(f)
1031 self.filenamelist.append(f.name)
1033 self.to_be_deleted = read_tobedeleted(self.dir)
1034 self.in_conflict = read_inconflict(self.dir)
1035 self.linkrepair = os.path.isfile(os.path.join(self.storedir, '_linkrepair'))
1037 # gather unversioned files, but ignore some stuff
1038 self.excluded = [ i for i in os.listdir(self.dir)
1039 for j in conf.config['exclude_glob']
1040 if fnmatch.fnmatch(i, j) ]
1041 self.filenamelist_unvers = [ i for i in os.listdir(self.dir)
1042 if i not in self.excluded
1043 if i not in self.filenamelist ]
1046 """tells us if the package is a link (has 'linkinfo').
1047 A package with linkinfo is a package which links to another package.
1048 Returns True if the package is a link, otherwise False."""
1049 return self.linkinfo.islink()
1051 def isexpanded(self):
1052 """tells us if the package is a link which is expanded.
1053 Returns True if the package is expanded, otherwise False."""
1054 return self.linkinfo.isexpanded()
1056 def islinkrepair(self):
1057 """tells us if we are repairing a broken source link."""
1058 return self.linkrepair
1061 """tells us if we have pulled a link."""
1062 return os.path.isfile(os.path.join(self.storedir, '_pulled'))
1065 """tells us if the link is frozen."""
1066 return os.path.isfile(os.path.join(self.storedir, '_frozenlink'))
1068 def get_pulled_srcmd5(self):
1070 for line in open(os.path.join(self.storedir, '_pulled'), 'r'):
1071 pulledrev = line.strip()
1074 def haslinkerror(self):
1076 Returns True if the link is broken otherwise False.
1077 If the package is not a link it returns False.
1079 return self.linkinfo.haserror()
1081 def linkerror(self):
1083 Returns an error message if the link is broken otherwise None.
1084 If the package is not a link it returns None.
1086 return self.linkinfo.error
1088 def update_local_pacmeta(self):
1090 Update the local _meta file in the store.
1091 It is replaced with the version pulled from upstream.
1093 meta = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1094 f = open(os.path.join(self.storedir, '_meta'), 'w')
1098 def findfilebyname(self, n):
1099 for i in self.filelist:
1103 def status(self, n):
1107 file storefile file present STATUS
1108 exists exists in _files
1111 x x x ' ' if digest differs: 'M'
1112 and if in conflicts file: 'C'
1114 x - x 'D' and listed in _to_be_deleted
1116 - x - 'D' (when file in working copy is already deleted)
1117 - - x 'F' (new in repo, but not yet in working copy)
1122 known_by_meta = False
1124 exists_in_store = False
1125 if n in self.filenamelist:
1126 known_by_meta = True
1127 if os.path.exists(os.path.join(self.absdir, n)):
1129 if os.path.exists(os.path.join(self.storedir, n)):
1130 exists_in_store = True
1133 if exists and not exists_in_store and known_by_meta:
1135 elif n in self.to_be_deleted:
1137 elif n in self.in_conflict:
1139 elif exists and exists_in_store and known_by_meta:
1140 #print self.findfilebyname(n)
1141 if dgst(os.path.join(self.absdir, n)) != self.findfilebyname(n).md5:
1145 elif exists and not exists_in_store and not known_by_meta:
1147 elif exists and exists_in_store and not known_by_meta:
1149 elif not exists and exists_in_store and known_by_meta:
1151 elif not exists and not exists_in_store and known_by_meta:
1153 elif not exists and exists_in_store and not known_by_meta:
1155 elif not exists and not exists_in_store and not known_by_meta:
1156 # this case shouldn't happen (unless there was a typo in the filename, etc.)
1157 raise IOError('osc: \'%s\' is not under version control' % n)
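# A small usage sketch (not from the original source; assumes the current
# directory is a checked-out package):
#   p = Package('.')
#   for name in p.filenamelist + p.filenamelist_unvers:
#       print statfrmt(p.status(name), name)   # roughly what 'osc status' prints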
1161 def comparePac(self, cmp_pac):
1163 This method compares the local filelist with
1164 the filelist of the passed package to see which files
1165 were added, removed and changed.
1172 for file in self.filenamelist+self.filenamelist_unvers:
1173 state = self.status(file)
1174 if state == 'A' and (not file in cmp_pac.filenamelist):
1175 added_files.append(file)
1176 elif file in cmp_pac.filenamelist and state == 'D':
1177 removed_files.append(file)
1178 elif state == ' ' and not file in cmp_pac.filenamelist:
1179 added_files.append(file)
1180 elif file in cmp_pac.filenamelist and state != 'A' and state != '?':
1181 if dgst(os.path.join(self.absdir, file)) != cmp_pac.findfilebyname(file).md5:
1182 changed_files.append(file)
1183 for file in cmp_pac.filenamelist:
1184 if not file in self.filenamelist:
1185 removed_files.append(file)
1186 removed_files = set(removed_files)
1188 return changed_files, added_files, removed_files
1190 def merge(self, otherpac):
1191 self.todo += otherpac.todo
1205 '\n '.join(self.filenamelist),
1213 def read_meta_from_spec(self, spec = None):
1218 # scan for spec files
1219 speclist = glob.glob(os.path.join(self.dir, '*.spec'))
1220 if len(speclist) == 1:
1221 specfile = speclist[0]
1222 elif len(speclist) > 1:
1223 print 'the following specfiles were found:'
1224 for file in speclist:
1226 print 'please specify one with --specfile'
1229 print 'no specfile was found - please specify one ' \
1233 data = read_meta_from_spec(specfile, 'Summary', 'Url', '%description')
1234 self.summary = data['Summary']
1235 self.url = data['Url']
1236 self.descr = data['%description']
1239 def update_package_meta(self, force=False):
1241 for the updatepacmetafromspec subcommand
1242 the force argument suppresses the confirmation question
1245 m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name))
1247 root = ET.fromstring(m)
1248 root.find('title').text = self.summary
1249 root.find('description').text = ''.join(self.descr)
1250 url = root.find('url')
1252 url = ET.SubElement(root, 'url')
1255 u = makeurl(self.apiurl, ['source', self.prjname, self.name, '_meta'])
1256 mf = metafile(u, ET.tostring(root))
1259 print '*' * 36, 'old', '*' * 36
1261 print '*' * 36, 'new', '*' * 36
1262 print ET.tostring(root)
1264 repl = raw_input('Write? (y/N/e) ')
1275 def mark_frozen(self):
1276 store_write_string(self.absdir, '_frozenlink', '')
1278 print "The link in this package is currently broken. I have checked"
1279 print "out the last working version instead, please use 'osc pull'"
1280 print "to repair the link."
1283 def unmark_frozen(self):
1284 if os.path.exists(os.path.join(self.storedir, '_frozenlink')):
1285 os.unlink(os.path.join(self.storedir, '_frozenlink'))
1287 def latest_rev(self):
1288 if self.islinkrepair():
1289 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrepair=1)
1290 elif self.islink() and self.isexpanded():
1291 if self.isfrozen() or self.ispulled():
1292 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1295 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name)
1298 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev=self.linkinfo.srcmd5)
1300 upstream_rev = show_upstream_xsrcmd5(self.apiurl, self.prjname, self.name, linkrev="base")
1303 upstream_rev = show_upstream_rev(self.apiurl, self.prjname, self.name)
1306 def update(self, rev = None, service_files = False):
1307 # save filelist and (modified) status before replacing the meta file
1308 saved_filenames = self.filenamelist
1309 saved_modifiedfiles = [ f for f in self.filenamelist if self.status(f) == 'M' ]
1312 self.update_local_filesmeta(rev)
1313 self = Package(self.dir, progress_obj=self.progress_obj)
1315 # which files no longer exist upstream?
1316 disappeared = [ f for f in saved_filenames if f not in self.filenamelist ]
1318 pathn = getTransActPath(self.dir)
1320 for filename in saved_filenames:
1321 if not filename.startswith('_service:') and filename in disappeared:
1322 print statfrmt('D', os.path.join(pathn, filename))
1323 # keep file if it has local modifications
1324 if oldp.status(filename) == ' ':
1325 self.delete_localfile(filename)
1326 self.delete_storefile(filename)
1328 for filename in self.filenamelist:
1330 state = self.status(filename)
1331 if not service_files and filename.startswith('_service:'):
1333 elif state == 'M' and self.findfilebyname(filename).md5 == oldp.findfilebyname(filename).md5:
1334 # no merge necessary... local file is changed, but upstream isn't
1336 elif state == 'M' and filename in saved_modifiedfiles:
1337 status_after_merge = self.mergefile(filename)
1338 print statfrmt(status_after_merge, os.path.join(pathn, filename))
1340 self.updatefile(filename, rev)
1341 print statfrmt('U', os.path.join(pathn, filename))
1343 self.updatefile(filename, rev)
1344 print 'Restored \'%s\'' % os.path.join(pathn, filename)
1346 self.updatefile(filename, rev)
1347 print statfrmt('A', os.path.join(pathn, filename))
1348 elif state == 'D' and self.findfilebyname(filename).md5 != oldp.findfilebyname(filename).md5:
1349 self.updatefile(filename, rev)
1350 self.delete_storefile(filename)
1351 print statfrmt('U', os.path.join(pathn, filename))
1355 self.update_local_pacmeta()
1357 #print ljust(p.name, 45), 'At revision %s.' % p.rev
1358 print 'At revision %s.' % self.rev
1360 if not service_files:
1361 self.run_source_services()
1363 def run_source_services(self):
1364 if self.filenamelist.count('_service'):
1365 service = ET.parse(os.path.join(self.absdir, '_service')).getroot()
1368 si.execute(self.absdir)
1370 def prepare_filelist(self):
1371 """Prepare a list of files, which will be processed by process_filelist
1372 method. This allows easy modifications of a file list in commit
1376 self.todo = self.filenamelist + self.filenamelist_unvers
1380 for f in (f for f in self.todo if not os.path.isdir(f)):
1382 status = self.status(f)
1385 ret += "%s %s %s\n" % (action, status, f)
1388 # Edit a filelist for package %s
1390 # l, leave = leave a file as is
1391 # r, remove = remove a file
1392 # a, add = add a file
1394 # If you remove a file from the list, it will be left unchanged
1395 # If you remove all files, the commit will be aborted"""
1399 def edit_filelist(self):
1400 """Opens a package list in editor for eediting. This allows easy
1401 modifications of it just by simple text editing
1405 (fd, filename) = tempfile.mkstemp(prefix = 'osc-filelist', suffix = '.txt')
1406 f = os.fdopen(fd, 'w')
1407 f.write(self.prepare_filelist())
1409 mtime_orig = os.stat(filename).st_mtime
1411 if sys.platform[:3] != 'win':
1412 editor = os.getenv('EDITOR', default='vim')
1414 editor = os.getenv('EDITOR', default='notepad')
1416 subprocess.call('%s %s' % (editor, filename), shell=True)
1417 mtime = os.stat(filename).st_mtime
1418 if mtime_orig < mtime:
1419 filelist = open(filename).readlines()
1423 raise oscerr.UserAbort()
1425 return self.process_filelist(filelist)
1427 def process_filelist(self, filelist):
1428 """Process a filelist - it add/remove or leave files. This depends on
1429 user input. If no file is processed, it raises an ValueError
1433 for line in (l.strip() for l in filelist if (l[0] != "#" and l.strip() != '')):
1435 foo = line.split(' ')
1437 action, state, name = (foo[0], ' ', foo[3])
1439 action, state, name = (foo[0], foo[1], foo[2])
1442 action = action.lower()
1445 if action in ('r', 'remove'):
1446 if self.status(name) == '?':
1448 if name in self.todo:
1449 self.todo.remove(name)
1451 self.delete_file(name, True)
1452 elif action in ('a', 'add'):
1453 if self.status(name) != '?':
1454 print "Cannot add file %s with state %s, skipped" % (name, self.status(name))
1457 elif action in ('l', 'leave'):
1460 raise ValueError("Unknow action `%s'" % action)
1463 raise ValueError("Empty filelist")
1466 """for objects to represent the review state in a request"""
1467 def __init__(self, state=None, by_user=None, by_group=None, who=None, when=None, comment=None):
1469 self.by_user = by_user
1470 self.by_group = by_group
1473 self.comment = comment
1476 """for objects to represent the "state" of a request"""
1477 def __init__(self, name=None, who=None, when=None, comment=None):
1481 self.comment = comment
1484 """represents an action"""
1485 def __init__(self, type, src_project, src_package, src_rev, dst_project, dst_package, src_update):
1487 self.src_project = src_project
1488 self.src_package = src_package
1489 self.src_rev = src_rev
1490 self.dst_project = dst_project
1491 self.dst_package = dst_package
1492 self.src_update = src_update
1495 """represent a request and holds its metadata
1496 it has methods to read in metadata from xml,
1497 different views, ..."""
1500 self.state = RequestState()
1503 self.last_author = None
1506 self.statehistory = []
1509 def read(self, root):
1510 self.reqid = int(root.get('id'))
1511 actions = root.findall('action')
1512 if len(actions) == 0:
1513 actions = [ root.find('submit') ] # for old style requests
1515 for action in actions:
1516 type = action.get('type', 'submit')
1518 src_prj = src_pkg = src_rev = dst_prj = dst_pkg = src_update = None
1519 if action.findall('source'):
1520 n = action.find('source')
1521 src_prj = n.get('project', None)
1522 src_pkg = n.get('package', None)
1523 src_rev = n.get('rev', None)
1524 if action.findall('target'):
1525 n = action.find('target')
1526 dst_prj = n.get('project', None)
1527 dst_pkg = n.get('package', None)
1528 if action.findall('options'):
1529 n = action.find('options')
1530 if n.findall('sourceupdate'):
1531 src_update = n.find('sourceupdate').text.strip()
1532 self.add_action(type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update)
1534 msg = 'invalid request format:\n%s' % ET.tostring(root)
1535 raise oscerr.APIError(msg)
1538 n = root.find('state')
1539 self.state.name, self.state.who, self.state.when \
1540 = n.get('name'), n.get('who'), n.get('when')
1542 self.state.comment = n.find('comment').text.strip()
1544 self.state.comment = None
1546 # read the review states
1547 for r in root.findall('review'):
1549 s.state = r.get('state')
1550 s.by_user = r.get('by_user')
1551 s.by_group = r.get('by_group')
1552 s.who = r.get('who')
1553 s.when = r.get('when')
1555 s.comment = r.find('comment').text.strip()
1558 self.reviews.append(s)
1560 # read the state history
1561 for h in root.findall('history'):
1563 s.name = h.get('name')
1564 s.who = h.get('who')
1565 s.when = h.get('when')
1567 s.comment = h.find('comment').text.strip()
1570 self.statehistory.append(s)
1571 self.statehistory.reverse()
1573 # read a description, if it exists
1575 n = root.find('description').text
1580 def add_action(self, type, src_prj, src_pkg, src_rev, dst_prj, dst_pkg, src_update):
1581 self.actions.append(Action(type, src_prj, src_pkg, src_rev,
1582 dst_prj, dst_pkg, src_update)
1585 def list_view(self):
1586 ret = '%6d State:%-7s By:%-12s When:%-12s' % (self.reqid, self.state.name, self.state.who, self.state.when)
1588 for a in self.actions:
1589 dst = "%s/%s" % (a.dst_project, a.dst_package)
1590 if a.src_package == a.dst_package:
1594 if a.type=="submit":
1595 sr_source="%s/%s -> " % (a.src_project, a.src_package)
1596 if a.type=="change_devel":
1597 dst = "developed in %s/%s" % (a.src_project, a.src_package)
1598 sr_source="%s/%s" % (a.dst_project, a.dst_package)
1600 ret += '\n %s: %-50s %-20s ' % \
1601 (a.type, sr_source, dst)
1603 if self.statehistory and self.statehistory[0]:
1605 for h in self.statehistory:
1606 who.append("%s(%s)" % (h.who,h.name))
1608 ret += "\n From: %s" % (' -> '.join(who))
1610 txt = re.sub(r'[^\x20-\x7e\t\n]', '_', self.descr)  # re has no POSIX [:isprint:]; strip non-printable chars
1612 lines = txt.splitlines()
1613 wrapper = textwrap.TextWrapper( width = 80,
1614 initial_indent=' Descr: ',
1615 subsequent_indent=' ')
1616 ret += "\n" + wrapper.fill(lines[0])
1617 wrapper.initial_indent = ' '
1618 for line in lines[1:]:
1619 ret += "\n" + wrapper.fill(line)
1625 def __cmp__(self, other):
1626 return cmp(self.reqid, other.reqid)
1630 for action in self.actions:
1631 action_list=" %s: " % (action.type)
1632 if action.type=="submit":
1635 r="(r%s)" % (action.src_rev)
1637 if action.src_update:
1638 m="(%s)" % (action.src_update)
1639 action_list=action_list+" %s/%s%s%s -> %s" % ( action.src_project, action.src_package, r, m, action.dst_project )
1640 if action.dst_package:
1641 action_list=action_list+"/%s" % ( action.dst_package )
1642 elif action.type=="delete":
1643 action_list=action_list+" %s" % ( action.dst_project )
1644 if action.dst_package:
1645 action_list=action_list+"/%s" % ( action.dst_package )
1646 elif action.type=="change_devel":
1647 action_list=action_list+" %s/%s developed in %s/%s" % \
1648 ( action.dst_project, action.dst_package, action.src_project, action.src_package )
1649 action_list=action_list+"\n"
1664 self.state.name, self.state.when, self.state.who,
1667 if len(self.reviews):
1668 reviewitems = [ '%-10s %s %s %s %s %s' \
1669 % (i.state, i.by_user, i.by_group, i.when, i.who, i.comment) \
1670 for i in self.reviews ]
1671 s += '\nReview: ' + '\n '.join(reviewitems)
1674 if len(self.statehistory):
1675 histitems = [ '%-10s %s %s' \
1676 % (i.name, i.when, i.who) \
1677 for i in self.statehistory ]
1678 s += '\nHistory: ' + '\n '.join(histitems)
1685 """format time as Apr 02 18:19
1687 depending on whether it is in the current year
1691 if time.localtime()[0] == time.localtime(t)[0]:
1693 return time.strftime('%b %d %H:%M',time.localtime(t))
1695 return time.strftime('%b %d %Y',time.localtime(t))
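# Illustrative calls (not from the original source; the rendered strings
# depend on the current date):
#   shorttime(time.time())                  # e.g. 'Apr 02 18:19' (same year)
#   shorttime(time.time() - 2*365*86400)    # e.g. 'Apr 02 2008'  (older)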
1698 def is_project_dir(d):
1699 return os.path.exists(os.path.join(d, store, '_project')) and not \
1700 os.path.exists(os.path.join(d, store, '_package'))
1703 def is_package_dir(d):
1704 return os.path.exists(os.path.join(d, store, '_project')) and \
1705 os.path.exists(os.path.join(d, store, '_package'))
1707 def parse_disturl(disturl):
1708 """Parse a disturl, returns tuple (apiurl, project, source, repository,
1709 revision), else raises an oscerr.WrongArgs exception
1712 m = DISTURL_RE.match(disturl)
1714 raise oscerr.WrongArgs("`%s' does not look like a disturl" % disturl)
1716 apiurl = m.group('apiurl')
1717 if apiurl.split('.')[0] != 'api':
1718 apiurl = 'https://api.' + ".".join(apiurl.split('.')[1:])
1719 return (apiurl, m.group('project'), m.group('source'), m.group('repository'), m.group('revision'))
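# An illustrative call (not from the original source; the disturl below is
# made up, but follows the documented scheme):
#   parse_disturl('obs://build.opensuse.org/openSUSE:Factory/standard/1234abcd-osc')
# returns
#   ('https://api.opensuse.org', 'openSUSE:Factory', 'osc', 'standard', '1234abcd')
# note how a host that does not already start with 'api' is rewritten to the
# 'https://api.' form above.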
1721 def parse_buildlogurl(buildlogurl):
1722 """Parse a build log url, returns a tuple (apiurl, project, package,
1723 repository, arch), else raises oscerr.WrongArgs exception"""
1725 global BUILDLOGURL_RE
1727 m = BUILDLOGURL_RE.match(buildlogurl)
1729 raise oscerr.WrongArgs('\'%s\' does not look like a build log url' % buildlogurl)
1731 return (m.group('apiurl'), m.group('project'), m.group('package'), m.group('repository'), m.group('arch'))
1734 """Split command line arguments like 'foo/bar' into 'foo' 'bar'.
1735 This is handy for copy/pasting a project/package combination in this form.
1737 Trailing slashes are removed before the split, because the split would
1738 otherwise give an additional empty string.
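# A small illustration (not from the original source), based on the
# description above:
#   slash_split(['openSUSE:Factory/osc', 'home:user/'])
# would yield something like
#   ['openSUSE:Factory', 'osc', 'home:user']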
1746 def expand_proj_pack(args, idx=0, howmany=0):
1747 """looks for occurance of '.' at the position idx.
1748 If howmany is 2, both proj and pack are expanded together
1749 using the current directory, or none of them, if not possible.
1750 If howmany is 0, proj is expanded if possible, then, if there
1751 is no idx+1 element in args (or args[idx+1] == '.'), pack is also
1752 expanded, if possible.
1753 If howmany is 1, only proj is expanded if possible.
1755 If args[idx] does not exist, an implicit '.' is assumed.
1756 If not enough elements up to idx exist, an error is raised.
1758 See also parseargs(args), slash_split(args), findpacs(args)
1759 All these need unification, somehow.
1762 # print args,idx,howmany
1765 raise oscerr.WrongArgs('not enough argument, expected at least %d' % idx)
1767 if len(args) == idx:
1769 if args[idx+0] == '.':
1770 if howmany == 0 and len(args) > idx+1:
1771 if args[idx+1] == '.':
1773 # remove one dot and make sure to expand both proj and pack
1778 # print args,idx,howmany
1780 args[idx+0] = store_read_project('.')
1783 package = store_read_package('.')
1784 args.insert(idx+1, package)
1788 package = store_read_package('.')
1789 args.insert(idx+1, package)
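# An illustrative sketch (not from the original source), assuming the current
# directory is a checkout of package 'osc' in project 'openSUSE:Factory':
#   expand_proj_pack(['.'])             # args expand to ['openSUSE:Factory', 'osc']
#   expand_proj_pack(['.'], howmany=1)  # args expand to ['openSUSE:Factory']
# i.e. the implicit '.' is replaced by the project (and, unless howmany is 1,
# also the package) read from the working copy.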
1793 def findpacs(files, progress_obj=None):
1794 """collect Package objects belonging to the given files
1795 and make sure each Package is returned only once"""
1798 p = filedir_to_pac(f, progress_obj)
1801 if i.name == p.name:
1811 def read_filemeta(dir):
1813 r = ET.parse(os.path.join(dir, store, '_files'))
1814 except SyntaxError, e:
1815 raise oscerr.NoWorkingCopy('\'%s\' is not a valid working copy.\n'
1816 'When parsing .osc/_files, the following error was encountered:\n'
1821 def read_tobedeleted(dir):
1823 fname = os.path.join(dir, store, '_to_be_deleted')
1825 if os.path.exists(fname):
1826 r = [ line.strip() for line in open(fname) ]
1831 def read_inconflict(dir):
1833 fname = os.path.join(dir, store, '_in_conflict')
1835 if os.path.exists(fname):
1836 r = [ line.strip() for line in open(fname) ]
1841 def parseargs(list_of_args):
1842 """Convenience method osc's commandline argument parsing.
1844 If called with an empty tuple (or list), return a list containing the current directory.
1845 Otherwise, return a list of the arguments."""
1847 return list(list_of_args)
1852 def filedir_to_pac(f, progress_obj=None):
1853 """Takes a working copy path, or a path to a file inside a working copy,
1854 and returns a Package object instance
1856 If the argument was a filename, add it onto the "todo" list of the Package """
1858 if os.path.isdir(f):
1860 p = Package(wd, progress_obj=progress_obj)
1863 wd = os.path.dirname(f)
1866 p = Package(wd, progress_obj=progress_obj)
1867 p.todo = [ os.path.basename(f) ]
1872 def statfrmt(statusletter, filename):
1873 return '%s %s' % (statusletter, filename)
1876 def pathjoin(a, *p):
1877 """Join two or more pathname components, inserting '/' as needed. Cut leading ./"""
1878 path = os.path.join(a, *p)
1879 if path.startswith('./'):
1884 def makeurl(baseurl, l, query=[]):
1885 """Given a list of path compoments, construct a complete URL.
1887 Optional parameters for a query string can be given as a list, as a
1888 dictionary, or as an already assembled string.
1889 In case of a dictionary, the parameters will be urlencoded by this
1890 function. In case of a list they will not be -- this is for backwards compatibility.
1893 if conf.config['verbose'] > 1:
1894 print 'makeurl:', baseurl, l, query
1896 if isinstance(query, list):
1897 query = '&'.join(query)
1898 elif isinstance(query, dict):
1899 query = urlencode(query)
1901 scheme, netloc = urlsplit(baseurl)[0:2]
1902 return urlunsplit((scheme, netloc, '/'.join(l), query, ''))
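# An illustrative call (not from the original source; project and package
# names are hypothetical):
#   makeurl('https://api.opensuse.org', ['source', 'openSUSE:Factory', 'osc'],
#           query={'rev': 'latest'})
# returns
#   'https://api.opensuse.org/source/openSUSE:Factory/osc?rev=latest'
# passing query as a list, e.g. ['rev=latest'], is for callers that have
# already urlencoded their parameters themselves.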
1905 def http_request(method, url, headers={}, data=None, file=None, timeout=100):
1906 """wrapper around urllib2.urlopen for error handling,
1907 and to support additional (PUT, DELETE) methods"""
1911 if conf.config['http_debug']:
1914 print '--', method, url
1916 if method == 'POST' and not file and not data:
1917 # adding data to an urllib2 request transforms it into a POST
1920 req = urllib2.Request(url)
1922 api_host_options=conf.get_apiurl_api_host_options(url)
1924 for header, value in api_host_options['http_headers']:
1925 req.add_header(header, value)
1927 req.get_method = lambda: method
1929 # POST requests are application/x-www-form-urlencoded by default
1930 # since we change the request into PUT, we also need to adjust the content type header
1931 if method == 'PUT' or (method == 'POST' and data):
1932 req.add_header('Content-Type', 'application/octet-stream')
1934 if isinstance(headers, dict):
1935 for i in headers.keys():
1937 req.add_header(i, headers[i])
1939 if file and not data:
1940 size = os.path.getsize(file)
1942 data = open(file, 'rb').read()
1945 filefd = open(file, 'rb')
1947 if sys.platform[:3] != 'win':
1948 data = mmap.mmap(filefd.fileno(), os.path.getsize(file), mmap.MAP_SHARED, mmap.PROT_READ)
1950 data = mmap.mmap(filefd.fileno(), os.path.getsize(file))
1952 except EnvironmentError, e:
1954 sys.exit('\n\n%s\nThe file \'%s\' could not be memory mapped. It is ' \
1955 '\non a filesystem which does not support this.' % (e, file))
1956 elif hasattr(e, 'winerror') and e.winerror == 5:
1957 # falling back to the default io
1958 data = open(file, 'rb').read()
1962 if conf.config['debug']: print method, url
1964 old_timeout = socket.getdefaulttimeout()
1965 # XXX: dirty hack as timeout doesn't work with python-m2crypto
1966 if old_timeout != timeout and not api_host_options['sslcertck']:
1967 socket.setdefaulttimeout(timeout)
1969 fd = urllib2.urlopen(req, data=data)
1971 if old_timeout != timeout and not api_host_options['sslcertck']:
1972 socket.setdefaulttimeout(old_timeout)
1973 if hasattr(conf.cookiejar, 'save'):
1974 conf.cookiejar.save(ignore_discard=True)
1976 if filefd: filefd.close()
1981 def http_GET(*args, **kwargs): return http_request('GET', *args, **kwargs)
1982 def http_POST(*args, **kwargs): return http_request('POST', *args, **kwargs)
1983 def http_PUT(*args, **kwargs): return http_request('PUT', *args, **kwargs)
1984 def http_DELETE(*args, **kwargs): return http_request('DELETE', *args, **kwargs)
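# A usage sketch (not from the original source; apiurl and the project name
# are hypothetical) combining makeurl() with the thin wrappers above -- this
# mirrors what the meta_get_* helpers below do:
#   u = makeurl(apiurl, ['source', 'openSUSE:Factory'])
#   f = http_GET(u)                      # file-like object
#   root = ET.parse(f).getroot()
#   names = [n.get('name') for n in root.findall('entry')]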
1987 def init_project_dir(apiurl, dir, project):
1988 if not os.path.exists(dir):
1989 if conf.config['checkout_no_colon']:
1990 os.makedirs(dir) # helpful with checkout_no_colon
1993 if not os.path.exists(os.path.join(dir, store)):
1994 os.mkdir(os.path.join(dir, store))
1996 # print 'project=',project,' dir=',dir
1997 store_write_project(dir, project)
1998 store_write_apiurl(dir, apiurl)
1999 if conf.config['do_package_tracking']:
2000 store_write_initial_packages(dir, project, [])
2002 def init_package_dir(apiurl, project, package, dir, revision=None, files=True):
2003 if not os.path.isdir(store):
2006 f = open('_project', 'w')
2007 f.write(project + '\n')
2009 f = open('_package', 'w')
2010 f.write(package + '\n')
2014 f = open('_files', 'w')
2015 f.write(''.join(show_files_meta(apiurl, project, package, revision=revision)))
2019 ET.ElementTree(element=ET.Element('directory')).write('_files')
2021 f = open('_osclib_version', 'w')
2022 f.write(__store_version__ + '\n')
2025 store_write_apiurl(os.path.pardir, apiurl)
2031 def check_store_version(dir):
2032 versionfile = os.path.join(dir, store, '_osclib_version')
2034 v = open(versionfile).read().strip()
2039 msg = 'Error: "%s" is not an osc working copy.' % os.path.abspath(dir)
2040 if os.path.exists(os.path.join(dir, '.svn')):
2041 msg = msg + '\nTry svn instead of osc.'
2042 raise oscerr.NoWorkingCopy(msg)
2044 if v != __store_version__:
2045 if v in ['0.2', '0.3', '0.4', '0.5', '0.6', '0.7', '0.8', '0.9', '0.95', '0.96', '0.97', '0.98', '0.99']:
2046 # these old store versions are compatible; just update the version file
2047 f = open(versionfile, 'w')
2048 f.write(__store_version__ + '\n')
2051 msg = 'The osc metadata of your working copy "%s"' % dir
2052 msg += '\nhas __store_version__ = %s, but it should be %s' % (v, __store_version__)
2053 msg += '\nPlease do a fresh checkout or update your client. Sorry about the inconvenience.'
2054 raise oscerr.WorkingCopyWrongVersion, msg
2057 def meta_get_packagelist(apiurl, prj):
2059 u = makeurl(apiurl, ['source', prj])
2061 root = ET.parse(f).getroot()
2062 return [ node.get('name') for node in root.findall('entry') ]
2065 def meta_get_filelist(apiurl, prj, package, verbose=False, expand=False, revision=None):
2066 """return a list of file names,
2067 or a list File() instances if verbose=True"""
2073 query['rev'] = revision
2075 query['rev'] = 'latest'
2077 u = makeurl(apiurl, ['source', prj, package], query=query)
2079 root = ET.parse(f).getroot()
2082 return [ node.get('name') for node in root.findall('entry') ]
2086 # rev = int(root.get('rev')) # don't force int. also allow srcmd5 here.
2087 rev = root.get('rev')
2088 for node in root.findall('entry'):
2089 f = File(node.get('name'),
2091 int(node.get('size')),
2092 int(node.get('mtime')))
2098 def meta_get_project_list(apiurl):
2099 u = makeurl(apiurl, ['source'])
2101 root = ET.parse(f).getroot()
2102 return sorted([ node.get('name') for node in root ])
2105 def show_project_meta(apiurl, prj):
2106 url = makeurl(apiurl, ['source', prj, '_meta'])
2108 return f.readlines()
2111 def show_project_conf(apiurl, prj):
2112 url = makeurl(apiurl, ['source', prj, '_config'])
2114 return f.readlines()
2117 def show_package_trigger_reason(apiurl, prj, pac, repo, arch):
2118 url = makeurl(apiurl, ['build', prj, repo, arch, pac, '_reason'])
2122 except urllib2.HTTPError, e:
2123 e.osc_msg = 'Error getting trigger reason for project \'%s\' package \'%s\'' % (prj, pac)
2127 def show_package_meta(apiurl, prj, pac):
2128 url = makeurl(apiurl, ['source', prj, pac, '_meta'])
2131 return f.readlines()
2132 except urllib2.HTTPError, e:
2133 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2137 def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with_project):
2139 path.append('source')
2145 path.append('_attribute')
2147 path.append(attribute)
2150 query.append("with_default=1")
2152 query.append("with_project=1")
2153 url = makeurl(apiurl, path, query)
2156 return f.readlines()
2157 except urllib2.HTTPError, e:
2158 e.osc_msg = 'Error getting meta for project \'%s\' package \'%s\'' % (prj, pac)
2162 def show_develproject(apiurl, prj, pac):
2163 m = show_package_meta(apiurl, prj, pac)
2165 return ET.fromstring(''.join(m)).find('devel').get('project')
2170 def show_pattern_metalist(apiurl, prj):
2171 url = makeurl(apiurl, ['source', prj, '_pattern'])
2175 except urllib2.HTTPError, e:
2176 e.osc_msg = 'show_pattern_metalist: Error getting pattern list for project \'%s\'' % prj
2178 r = [ node.get('name') for node in tree.getroot() ]
2183 def show_pattern_meta(apiurl, prj, pattern):
2184 url = makeurl(apiurl, ['source', prj, '_pattern', pattern])
2187 return f.readlines()
2188 except urllib2.HTTPError, e:
2189 e.osc_msg = 'show_pattern_meta: Error getting pattern \'%s\' for project \'%s\'' % (pattern, prj)
2194 """metafile that can be manipulated and is stored back after manipulation."""
2195 def __init__(self, url, input, change_is_required=False, file_ext='.xml'):
2199 self.change_is_required = change_is_required
2200 (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext)
2201 f = os.fdopen(fd, 'w')
2202 f.write(''.join(input))
2204 self.hash_orig = dgst(self.filename)
2207 hash = dgst(self.filename)
2208 if self.change_is_required and hash == self.hash_orig:
2209 print 'File unchanged. Not saving.'
2210 os.unlink(self.filename)
2213 print 'Sending meta data...'
2214 # don't do any exception handling... it's up to the caller what to do in case of an exception
2216 http_PUT(self.url, file=self.filename)
2217 os.unlink(self.filename)
2221 if sys.platform[:3] != 'win':
2222 editor = os.getenv('EDITOR', default='vim')
2224 editor = os.getenv('EDITOR', default='notepad')
2227 subprocess.call('%s %s' % (editor, self.filename), shell=True)
2231 except urllib2.HTTPError, e:
2232 error_help = "%d" % e.code
2233 if e.headers.get('X-Opensuse-Errorcode'):
2234 error_help = "%s (%d)" % (e.headers.get('X-Opensuse-Errorcode'), e.code)
2236 print >>sys.stderr, 'BuildService API error:', error_help
2237 # examine the error - we can't raise an exception because we might want to try again
2240 if '<summary>' in data:
2241 print >>sys.stderr, data.split('<summary>')[1].split('</summary>')[0]
2242 input = raw_input('Try again? ([y/N]): ')
2243 if input not in ['y', 'Y']:
2249 if os.path.exists(self.filename):
2250 print 'discarding %s' % self.filename
2251 os.unlink(self.filename)
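# A rough usage sketch (illustration only): how the metafile helper above is
# typically driven, e.g. by edit_meta() further down. The url and the meta
# lines are placeholders.
#
#   m = metafile(url, meta_lines, change_is_required=True)
#   # ... let the user edit m.filename with $EDITOR ...
#   # if change_is_required is set and the digest still equals hash_orig,
#   # nothing is sent; otherwise the file is PUT back to m.url, and on an
#   # HTTPError the user is asked whether to try again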
2254 # different types of metadata
2255 metatypes = { 'prj': { 'path': 'source/%s/_meta',
2256 'template': new_project_templ,
2259 'pkg': { 'path' : 'source/%s/%s/_meta',
2260 'template': new_package_templ,
2263 'attribute': { 'path' : 'source/%s/%s/_meta',
2264 'template': new_attribute_templ,
2267 'prjconf': { 'path': 'source/%s/_config',
2271 'user': { 'path': 'person/%s',
2272 'template': new_user_template,
2275 'pattern': { 'path': 'source/%s/_pattern/%s',
2276 'template': new_pattern_template,
2281 def meta_exists(metatype,
2288 apiurl = conf.config['apiurl']
2289 url = make_meta_url(metatype, path_args, apiurl)
2291 data = http_GET(url).readlines()
2292 except urllib2.HTTPError, e:
2293 if e.code == 404 and create_new:
2294 data = metatypes[metatype]['template']
2296 data = StringIO(data % template_args).readlines()
2301 def make_meta_url(metatype, path_args=None, apiurl=None):
2303 apiurl = conf.config['apiurl']
2304 if metatype not in metatypes:
2305 raise AttributeError('make_meta_url(): Unknown meta type \'%s\'' % metatype)
2306 path = metatypes[metatype]['path']
2309 path = path % path_args
2311 return makeurl(apiurl, [path])
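# Illustrative sketch (not part of the library): how the metatypes table above
# is expanded into URLs by make_meta_url(). The apiurl is a placeholder.
#
#   make_meta_url('prj', quote_plus('home:user'), apiurl)
#   # -> <apiurl>/source/home:user/_meta
#   make_meta_url('pkg', (quote_plus('home:user'), quote_plus('foo')), apiurl)
#   # -> <apiurl>/source/home:user/foo/_meta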
2314 def edit_meta(metatype,
2319 change_is_required=False,
2323 apiurl = conf.config['apiurl']
2325 data = meta_exists(metatype,
2328 create_new = metatype != 'prjconf', # prjconf always exists, 404 => unknown prj
2332 change_is_required = True
2334 url = make_meta_url(metatype, path_args, apiurl)
2335 f=metafile(url, data, change_is_required, metatypes[metatype]['file_ext'])
2343 def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, linkrepair=False):
2346 query['rev'] = revision
2348 query['rev'] = 'latest'
2350 query['linkrev'] = linkrev
2351 elif conf.config['linkcontrol']:
2352 query['linkrev'] = 'base'
2356 query['emptylink'] = 1
2357 f = http_GET(makeurl(apiurl, ['source', prj, pac], query=query))
2358 return f.readlines()
2361 def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None):
2362 m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision)
2363 return ET.fromstring(''.join(m)).get('srcmd5')
2366 def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False):
2367 m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair)
2369 # only source link packages have a <linkinfo> element.
2370 li_node = ET.fromstring(''.join(m)).find('linkinfo')
2378 raise oscerr.LinkExpandError(prj, pac, li.error)
2382 def show_upstream_rev(apiurl, prj, pac):
2383 m = show_files_meta(apiurl, prj, pac)
2384 return ET.fromstring(''.join(m)).get('rev')
2387 def read_meta_from_spec(specfile, *args):
2388 import codecs, locale, re
2390 Read tags and sections from a spec file. To read out
2391 a tag, the passed argument must not end with a colon. To
2392 read out a section, the passed argument must start with a '%'.
2394 This method returns a dictionary which contains the requested data.
2398 if not os.path.isfile(specfile):
2399 raise IOError('\'%s\' is not a regular file' % specfile)
2402 lines = codecs.open(specfile, 'r', locale.getpreferredencoding()).readlines()
2403 except UnicodeDecodeError:
2404 lines = open(specfile).readlines()
2411 if itm.startswith('%'):
2412 sections.append(itm)
2416 tag_pat = r'(?P<tag>^%s)\s*:\s*(?P<val>.*)'
2418 m = re.compile(tag_pat % tag, re.I | re.M).search(''.join(lines))
2419 if m and m.group('val'):
2420 spec_data[tag] = m.group('val').strip()
2422 print >>sys.stderr, 'error - tag \'%s\' does not exist' % tag
2425 section_pat = r'^%s\s*?$'
2426 for section in sections:
2427 m = re.compile(section_pat % section, re.I | re.M).search(''.join(lines))
2429 start = lines.index(m.group()+'\n') + 1
2431 print >>sys.stderr, 'error - section \'%s\' does not exist' % section
2434 for line in lines[start:]:
2435 if line.startswith('%'):
2438 spec_data[section] = data
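# Example (illustrative only): reading a tag and a section with
# read_meta_from_spec() above. 'foo.spec' is a placeholder path.
#
#   data = read_meta_from_spec('foo.spec', 'Name', 'Version', '%description')
#   data['Name']          # tag value, taken from the 'Name:' line
#   data['%description']  # the lines collected until the next '%' section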
2443 def edit_message(footer='', template=''):
2444 delim = '--This line, and those below, will be ignored--\n'
2446 (fd, filename) = tempfile.mkstemp(prefix = 'osc-commitmsg', suffix = '.diff')
2447 f = os.fdopen(fd, 'w')
2456 if sys.platform[:3] != 'win':
2457 editor = os.getenv('EDITOR', default='vim')
2459 editor = os.getenv('EDITOR', default='notepad')
2462 subprocess.call('%s %s' % (editor, filename), shell=True)
2463 msg = open(filename).read().split(delim)[0].rstrip()
2468 input = raw_input('Log message not specified\n'
2469 'a)bort, c)ontinue, e)dit: ')
2471 raise oscerr.UserAbort()
2481 def create_delete_request(apiurl, project, package, message):
2486 package = """package="%s" """ % (package)
2492 <action type="delete">
2493 <target project="%s" %s/>
2496 <description>%s</description>
2498 """ % (project, package,
2499 cgi.escape(message or ''))
2501 u = makeurl(apiurl, ['request'], query='cmd=create')
2502 f = http_POST(u, data=xml)
2504 root = ET.parse(f).getroot()
2505 return root.get('id')
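# Example (illustrative only): create_delete_request() above posts a small
# <request> document and returns the id assigned by the server. All values
# below are placeholders.
#
#   reqid = create_delete_request(apiurl, 'home:user', 'foo', 'package is obsolete')
#   print 'created delete request %s' % reqid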
2508 def create_change_devel_request(apiurl,
2509 devel_project, devel_package,
2516 <action type="change_devel">
2517 <source project="%s" package="%s" />
2518 <target project="%s" package="%s" />
2521 <description>%s</description>
2523 """ % (devel_project,
2527 cgi.escape(message or ''))
2529 u = makeurl(apiurl, ['request'], query='cmd=create')
2530 f = http_POST(u, data=xml)
2532 root = ET.parse(f).getroot()
2533 return root.get('id')
2536 # This creates an old style submit request for server api 1.0
2537 def create_submit_request(apiurl,
2538 src_project, src_package,
2539 dst_project=None, dst_package=None,
2540 message=None, orev=None, src_update=None):
2545 options_block = """<options><sourceupdate>%s</sourceupdate></options> """ % (src_update)
2547 # Yes, this kind of xml construction is horrible
2552 packagexml = """package="%s" """ % (dst_package)
2553 targetxml = """<target project="%s" %s /> """ % (dst_project, packagexml)
2554 # XXX: keep the old template for now in order to work with old obs instances
2556 <request type="submit">
2558 <source project="%s" package="%s" rev="%s"/>
2563 <description>%s</description>
2567 orev or show_upstream_rev(apiurl, src_project, src_package),
2570 cgi.escape(message or ""))
2572 u = makeurl(apiurl, ['request'], query='cmd=create')
2573 f = http_POST(u, data=xml)
2575 root = ET.parse(f).getroot()
2576 return root.get('id')
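# A rough usage sketch (illustration only): filing an old-style submit request
# with create_submit_request() above. Names are placeholders; when src_update
# is given it ends up verbatim in the <sourceupdate> options block.
#
#   reqid = create_submit_request(apiurl,
#                                 'home:user:branches:foo', 'foo',
#                                 'openSUSE:Factory', 'foo',
#                                 message='update to 1.1', src_update='cleanup')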
2579 def get_request(apiurl, reqid):
2580 u = makeurl(apiurl, ['request', reqid])
2582 root = ET.parse(f).getroot()
2589 def change_review_state(apiurl, reqid, newstate, by_user='', by_group='', message='', supersed=''):
2592 query={'cmd': 'changereviewstate', 'newstate': newstate, 'by_user': by_user, 'superseded_by': supersed})
2593 f = http_POST(u, data=message)
2596 def change_request_state(apiurl, reqid, newstate, message='', supersed=''):
2599 query={'cmd': 'changestate', 'newstate': newstate, 'superseded_by': supersed})
2600 f = http_POST(u, data=message)
2604 def get_request_list(apiurl, project, package, req_who='', req_state=('new',), req_type=None, exclude_target_projects=[]):
2606 if 'all' not in req_state:
2607 for state in req_state:
2610 match += '(state/@name=\'%s\')' % quote_plus(state)
2614 match += '(state/@who=\'%(who)s\' or history/@who=\'%(who)s\')' % {'who': quote_plus(req_who)}
2616 # XXX: we cannot use the '|' in the xpath expression because it is not supported
2620 todo['project'] = project
2622 todo['package'] = package
2623 for kind, val in todo.iteritems():
2626 match += '(action/target/@%(kind)s=\'%(val)s\' or ' \
2627 'action/source/@%(kind)s=\'%(val)s\' or ' \
2628 'submit/target/@%(kind)s=\'%(val)s\' or ' \
2629 'submit/source/@%(kind)s=\'%(val)s\')' % {'kind': kind, 'val': val}
2633 match += '(action/@type=\'%s\')' % req_type
2634 for i in exclude_target_projects:
2637 match += '(not(action/target/@project=\'%(prj)s\' or ' \
2638 'submit/target/@project=\'%(prj)s\'))' % {'prj': quote_plus(i)}
2640 if conf.config['verbose'] > 1:
2641 print '[ %s ]' % match
2642 u = makeurl(apiurl, ['search', 'request'], ['match=%s' % match.replace(' ', '%20')])
2644 collection = ET.parse(f).getroot()
2646 for root in collection.findall('request'):
2654 def get_request_log(apiurl, reqid):
2655 r = get_request(conf.config['apiurl'], reqid)
2657 frmt = '-' * 76 + '\n%s | %s | %s\n\n%s'
2658 # the description of the request is used for the initial log entry
2659 # otherwise its comment attribute would contain None
2660 if len(r.statehistory) >= 1:
2661 r.statehistory[-1].comment = r.descr
2663 r.state.comment = r.descr
2664 for state in [ r.state ] + r.statehistory:
2665 s = frmt % (state.name, state.who, state.when, str(state.comment))
2670 def get_user_meta(apiurl, user):
2671 u = makeurl(apiurl, ['person', quote_plus(user)])
2674 return ''.join(f.readlines())
2675 except urllib2.HTTPError:
2676 print 'user \'%s\' not found' % user
2680 def get_user_data(apiurl, user, *tags):
2681 """get specified tags from the user meta"""
2682 meta = get_user_meta(apiurl, user)
2685 root = ET.fromstring(meta)
2688 if root.find(tag).text is not None:
2689 data.append(root.find(tag).text)
2693 except AttributeError:
2694 # this part is reached if the tags tuple contains an invalid tag
2695 print 'The xml file for user \'%s\' seems to be broken' % user
2700 def get_source_file(apiurl, prj, package, filename, targetfilename=None, revision=None, progress_obj=None):
2701 import tempfile, shutil
2704 query = { 'rev': revision }
2708 (fd, tmpfile) = tempfile.mkstemp(prefix = filename, suffix = '.osc')
2709 o = os.fdopen(fd, 'wb')
2710 u = makeurl(apiurl, ['source', prj, package, pathname2url(filename)], query=query)
2711 for buf in streamfile(u, http_GET, BUFSIZE, progress_obj=progress_obj):
2714 shutil.move(tmpfile, targetfilename or filename)
2715 os.chmod(targetfilename or filename, 0644)
2723 def get_binary_file(apiurl, prj, repo, arch,
2726 target_filename = None,
2727 target_mtime = None,
2728 progress_meter = False):
2730 target_filename = target_filename or filename
2732 where = package or '_repository'
2733 u = makeurl(apiurl, ['build', prj, repo, arch, where, filename])
2736 sys.stdout.write("Downloading %s [ 0%%]" % filename)
2740 binsize = int(f.headers['content-length'])
2743 (fd, tmpfilename) = tempfile.mkstemp(prefix = filename + '.', suffix = '.osc')
2744 os.chmod(tmpfilename, 0644)
2747 o = os.fdopen(fd, 'wb')
2751 #buf = f.read(BUFSIZE)
2755 downloaded += len(buf)
2757 completion = str(int((float(downloaded)/binsize)*100))
2758 sys.stdout.write('%s%*s%%]' % ('\b'*5, 3, completion))
2763 sys.stdout.write('\n')
2765 shutil.move(tmpfilename, target_filename)
2767 os.utime(target_filename, (-1, target_mtime))
2769 # make sure that the temp file is cleaned up when we are interrupted
2771 try: os.unlink(tmpfilename)
2774 def dgst_from_string(str):
2775 # Python 2.5 deprecates the md5 module
2776 # Python 2.4 doesn't have hashlib yet
2779 md5_hash = hashlib.md5()
2782 md5_hash = md5.new()
2783 md5_hash.update(str)
2784 return md5_hash.hexdigest()
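# Example (illustrative only): dgst_from_string() above returns the hex md5
# digest of a string, via hashlib where available and the old md5 module
# otherwise.
#
#   dgst_from_string('foo')
#   # -> 'acbd18db4cc2f85cedef654fccc4a4d8'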
2788 #if not os.path.exists(file):
2798 f = open(file, 'rb')
2800 buf = f.read(BUFSIZE)
2803 return s.hexdigest()
2808 """return true if a string is binary data using diff's heuristic"""
2809 if s and '\0' in s[:4096]:
2814 def binary_file(fn):
2815 """read 4096 bytes from a file named fn, and call binary() on the data"""
2816 return binary(open(fn, 'rb').read(4096))
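# Illustrative sketch (not part of the library): the heuristic above flags data
# as binary as soon as a NUL byte occurs within the first 4096 bytes.
#
#   binary('just some text\n')   # -> False
#   binary('PK\x03\x04\x00')     # -> True, '\x00' is in the first 4k bytes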
2819 def get_source_file_diff(dir, filename, rev, oldfilename = None, olddir = None, origfilename = None):
2821 This method diffs oldfilename against filename (so filename will
2822 be shown as the new file).
2823 The variable origfilename is used if filename and oldfilename differ
2824 in their names (for instance if a tempfile is used for filename etc.)
2830 oldfilename = filename
2833 olddir = os.path.join(dir, store)
2835 if not origfilename:
2836 origfilename = filename
2838 file1 = os.path.join(olddir, oldfilename) # old/stored original
2839 file2 = os.path.join(dir, filename) # working copy
2841 f1 = open(file1, 'rb')
2845 f2 = open(file2, 'rb')
2849 if binary(s1) or binary(s2):
2850 d = ['Binary file %s has changed\n' % origfilename]
2853 d = difflib.unified_diff(\
2856 fromfile = '%s\t(revision %s)' % (origfilename, rev), \
2857 tofile = '%s\t(working copy)' % origfilename)
2859 # if file doesn't end with newline, we need to append one in the diff result
2861 for i, line in enumerate(d):
2862 if not line.endswith('\n'):
2863 d[i] += '\n\\ No newline at end of file'
2869 def make_diff(wc, revision):
2875 diff_hdr = 'Index: %s\n'
2876 diff_hdr += '===================================================================\n'
2878 olddir = os.getcwd()
2882 for file in wc.todo:
2883 if file in wc.filenamelist+wc.filenamelist_unvers:
2884 state = wc.status(file)
2886 added_files.append(file)
2888 removed_files.append(file)
2889 elif state == 'M' or state == 'C':
2890 changed_files.append(file)
2892 diff.append('osc: \'%s\' is not under version control' % file)
2894 for file in wc.filenamelist+wc.filenamelist_unvers:
2895 state = wc.status(file)
2896 if state == 'M' or state == 'C':
2897 changed_files.append(file)
2899 added_files.append(file)
2901 removed_files.append(file)
2903 tmpdir = tempfile.mkdtemp(str(revision), wc.name)
2905 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, revision)
2906 cmp_pac = Package(tmpdir)
2908 for file in wc.todo:
2909 if file in cmp_pac.filenamelist:
2910 if file in wc.filenamelist:
2911 changed_files.append(file)
2913 diff.append('osc: \'%s\' is not under version control' % file)
2915 diff.append('osc: unable to find \'%s\' in revision %s' % (file, cmp_pac.rev))
2917 changed_files, added_files, removed_files = wc.comparePac(cmp_pac)
2919 for file in changed_files:
2920 diff.append(diff_hdr % file)
2922 diff.append(get_source_file_diff(wc.absdir, file, wc.rev))
2924 cmp_pac.updatefile(file, revision)
2925 diff.append(get_source_file_diff(wc.absdir, file, revision, file,
2926 cmp_pac.absdir, file))
2927 (fd, tmpfile) = tempfile.mkstemp()
2928 for file in added_files:
2929 diff.append(diff_hdr % file)
2931 diff.append(get_source_file_diff(wc.absdir, file, wc.rev, os.path.basename(tmpfile),
2932 os.path.dirname(tmpfile), file))
2934 diff.append(get_source_file_diff(wc.absdir, file, revision, os.path.basename(tmpfile),
2935 os.path.dirname(tmpfile), file))
2937 # FIXME: this is ugly but it cannot be avoided atm
2938 # if a file is deleted via "osc rm file" we should keep the storefile.
2940 if cmp_pac is None and removed_files:
2941 tmpdir = tempfile.mkdtemp()
2943 init_package_dir(wc.apiurl, wc.prjname, wc.name, tmpdir, wc.rev)
2944 tmp_pac = Package(tmpdir)
2947 for file in removed_files:
2948 diff.append(diff_hdr % file)
2950 tmp_pac.updatefile(file, tmp_pac.rev)
2951 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2952 wc.rev, file, tmp_pac.storedir, file))
2954 cmp_pac.updatefile(file, revision)
2955 diff.append(get_source_file_diff(os.path.dirname(tmpfile), os.path.basename(tmpfile),
2956 revision, file, cmp_pac.storedir, file))
2960 delete_dir(cmp_pac.absdir)
2962 delete_dir(tmp_pac.absdir)
2966 def server_diff(apiurl,
2967 old_project, old_package, old_revision,
2968 new_project, new_package, new_revision, unified=False):
2970 query = {'cmd': 'diff', 'expand': '1'}
2972 query['oproject'] = old_project
2974 query['opackage'] = old_package
2976 query['orev'] = old_revision
2978 query['rev'] = new_revision
2980 query['unified'] = 1
2982 u = makeurl(apiurl, ['source', new_project, new_package], query=query)
2988 def make_dir(apiurl, project, package, pathname=None, prj_dir=None):
2990 creates the plain directory structure for a package dir.
2991 The 'apiurl' parameter is needed for the project dir initialization.
2992 The 'project' and 'package' parameters specify the name of the
2993 project and the package. The optional 'pathname' parameter is used
2994 for printing out the message that a new dir was created (default: 'prj_dir/package').
2995 The optional 'prj_dir' parameter specifies the path to the project dir (default: 'project').
2997 prj_dir = prj_dir or project
2999 # FIXME: carefully test each path component of prj_dir,
3000 # if we have a .osc/_files entry at that level.
3001 # -> if so, we have a package/project clash,
3002 # and should rename this path component by appending '.proj'
3003 # and give user a warning message, to discourage such clashes
3005 pathname = pathname or getTransActPath(os.path.join(prj_dir, package))
3006 if is_package_dir(prj_dir):
3007 # we want this to become a project directory,
3008 # but it already is a package directory.
3009 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving myself away not implemented')
3011 if not is_project_dir(prj_dir):
3012 # this directory could exist as a parent directory for one of our earlier
3013 # checked out sub-projects. in this case, we still need to initialize it.
3014 print statfrmt('A', prj_dir)
3015 init_project_dir(apiurl, prj_dir, project)
3017 if is_project_dir(os.path.join(prj_dir, package)):
3018 # the thing exists, but is a project directory and not a package directory
3019 # FIXME: this should be a warning message to discourage package/project clashes
3020 raise oscerr.OscIOError(None, 'checkout_package: package/project clash. Moving project away not implemented')
3022 if not os.path.exists(os.path.join(prj_dir, package)):
3023 print statfrmt('A', pathname)
3024 os.mkdir(os.path.join(prj_dir, package))
3025 os.mkdir(os.path.join(prj_dir, package, store))
3027 return(os.path.join(prj_dir, package))
3030 def checkout_package(apiurl, project, package,
3031 revision=None, pathname=None, prj_obj=None,
3032 expand_link=False, prj_dir=None, service_files=None, progress_obj=None):
3034 # the project we're in might be deleted.
3035 # that'll throw an error then.
3036 olddir = os.getcwd()
3038 olddir = os.environ.get("PWD")
3043 if sys.platform[:3] == 'win':
3044 prj_dir = prj_dir[:2] + prj_dir[2:].replace(':', ';')
3046 if conf.config['checkout_no_colon']:
3047 prj_dir = prj_dir.replace(':', '/')
3050 pathname = getTransActPath(os.path.join(prj_dir, package))
3052 # before we create directories and stuff, check if the package actually exists
3054 show_package_meta(apiurl, project, package)
3058 # try to read from the linkinfo
3059 # if it is a link we use the xsrcmd5 as the revision to be checked out
3062 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision)
3064 x = show_upstream_xsrcmd5(apiurl, project, package, revision=revision, linkrev='base')
3069 os.chdir(make_dir(apiurl, project, package, pathname, prj_dir))
3070 init_package_dir(apiurl, project, package, store, revision)
3072 p = Package(package, progress_obj=progress_obj)
3075 for filename in p.filenamelist:
3076 if service_files or not filename.startswith('_service:'):
3077 p.updatefile(filename, revision)
3078 # print 'A ', os.path.join(project, package, filename)
3079 print statfrmt('A', os.path.join(pathname, filename))
3080 if conf.config['do_package_tracking']:
3081 # check if we can re-use an existing project object
3083 prj_obj = Project(os.getcwd())
3084 prj_obj.set_state(p.name, ' ')
3085 prj_obj.write_packages()
3089 def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False,
3090 dst_userid = None, keep_develproject = False):
3092 update pkgmeta with new_name and new_prj and set the calling user as the
3093 only maintainer (unless keep_maintainers is set). Additionally remove the
3094 develproject entry (<devel />) unless keep_develproject is true.
3096 root = ET.fromstring(''.join(pkgmeta))
3097 root.set('name', new_name)
3098 root.set('project', new_prj)
3099 if not keep_maintainers:
3100 for person in root.findall('person'):
3102 if not keep_develproject:
3103 for dp in root.findall('devel'):
3105 return ET.tostring(root)
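# A rough usage sketch (illustration only): preparing target meta with
# replace_pkg_meta() above, as link_pac()/aggregate_pac()/copy_pac() below do.
# Names are placeholders.
#
#   src_meta = show_package_meta(apiurl, 'home:user', 'foo')
#   dst_meta = replace_pkg_meta(src_meta, 'foo', 'home:user:testing')
#   # dst_meta now has name="foo" and project="home:user:testing"; the
#   # <person/> and <devel/> elements are dropped unless the keep_* flags are set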
3107 def link_to_branch(apiurl, project, package):
3109 convert a package with a _link + project.diff to a branch
3112 if '_link' in meta_get_filelist(apiurl, project, package):
3113 u = makeurl(apiurl, ['source', project, package], 'cmd=linktobranch')
3116 raise oscerr.OscIOError(None, 'no _link file inside project \'%s\' package \'%s\'' % (project, package))
3118 def link_pac(src_project, src_package, dst_project, dst_package, force, rev='', cicount='', disable_publish = False):
3120 create a linked package
3121 - "src" is the original package
3122 - "dst" is the "link" package that we are creating here
3127 dst_meta = meta_exists(metatype='pkg',
3128 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3130 create_new=False, apiurl=conf.config['apiurl'])
3132 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3133 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3138 root = ET.fromstring(''.join(dst_meta))
3139 elm = root.find('publish')
3141 elm = ET.SubElement(root, 'publish')
3143 ET.SubElement(elm, 'disable')
3144 dst_meta = ET.tostring(root)
3147 path_args=(dst_project, dst_package),
3149 # create the _link file
3150 # but first, make sure not to overwrite an existing one
3151 if '_link' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3153 print >>sys.stderr, 'forced overwrite of existing _link file'
3156 print >>sys.stderr, '_link file already exists...! Aborting'
3160 rev = 'rev="%s"' % rev
3165 cicount = 'cicount="%s"' % cicount
3169 print 'Creating _link...',
3170 link_template = """\
3171 <link project="%s" package="%s" %s %s>
3173 <!-- <apply name="patch" /> apply a patch on the source directory -->
3174 <!-- <topadd>%%define build_with_feature_x 1</topadd> add a line on the top (spec file only) -->
3175 <!-- <add>file.patch</add> add a patch to be applied after %%setup (spec file only) -->
3176 <!-- <delete>filename</delete> delete a file -->
3179 """ % (src_project, src_package, rev, cicount)
3181 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_link'])
3182 http_PUT(u, data=link_template)
3185 def aggregate_pac(src_project, src_package, dst_project, dst_package, repo_map = {}, disable_publish = False):
3188 - "src" is the original package
3189 - "dst" is the "aggregate" package that we are creating here
3190 - "map" is a dictionary SRC => TARGET repository mappings
3195 dst_meta = meta_exists(metatype='pkg',
3196 path_args=(quote_plus(dst_project), quote_plus(dst_package)),
3198 create_new=False, apiurl=conf.config['apiurl'])
3200 src_meta = show_package_meta(conf.config['apiurl'], src_project, src_package)
3201 dst_meta = replace_pkg_meta(src_meta, dst_package, dst_project)
3206 root = ET.fromstring(''.join(dst_meta))
3207 elm = root.find('publish')
3209 elm = ET.SubElement(root, 'publish')
3211 ET.SubElement(elm, 'disable')
3212 dst_meta = ET.tostring(root)
3215 path_args=(dst_project, dst_package),
3218 # create the _aggregate file
3219 # but first, make sure not to overwrite an existing one
3220 if '_aggregate' in meta_get_filelist(conf.config['apiurl'], dst_project, dst_package):
3222 print >>sys.stderr, '_aggregate file already exists...! Aborting'
3225 print 'Creating _aggregate...',
3226 aggregate_template = """\
3228 <aggregate project="%s">
3230 for tgt, src in repo_map.iteritems():
3231 aggregate_template += """\
3232 <repository target="%s" source="%s" />
3235 aggregate_template += """\
3236 <package>%s</package>
3239 """ % ( src_package)
3241 u = makeurl(conf.config['apiurl'], ['source', dst_project, dst_package, '_aggregate'])
3242 http_PUT(u, data=aggregate_template)
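# Illustrative sketch (not part of the library): the _aggregate file assembled
# by the template above for a call like (all names are placeholders)
#
#   aggregate_pac('openSUSE:Factory', 'foo', 'home:user', 'foo',
#                 repo_map={'openSUSE_Factory': 'standard'})
#
# contains one <aggregate project="openSUSE:Factory"> block with a
# <repository target="..." source="..." /> line per repo_map entry and a
# <package>foo</package> entry, and is PUT to the _aggregate file of the
# destination package.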
3246 def attribute_branch_pkg(apiurl, attribute, maintained_update_project_attribute, package, targetproject, return_existing=False):
3248 Branch packages defined via attributes (via API call)
3250 query = { 'cmd': 'branch' }
3251 query['attribute'] = attribute
3253 query['target_project'] = targetproject
3255 query['package'] = package
3256 if maintained_update_project_attribute:
3257 query['update_project_attribute'] = maintained_update_project_attribute
3259 u = makeurl(apiurl, ['source'], query=query)
3263 except urllib2.HTTPError, e:
3264 msg = ''.join(e.readlines())
3265 msg = msg.split('<summary>')[1]
3266 msg = msg.split('</summary>')[0]
3267 m = re.match(r"attribute branch call failed: (\S+)/", msg)
3271 r = r.split('targetproject">')[1]
3272 r = r.split('</data>')[0]
3276 def branch_pkg(apiurl, src_project, src_package, nodevelproject=False, rev=None, target_project=None, target_package=None, return_existing=False):
3278 Branch a package (via API call)
3280 query = { 'cmd': 'branch' }
3282 query['ignoredevel'] = '1'
3286 query['target_project'] = target_project
3288 query['target_package'] = target_package
3289 u = makeurl(apiurl, ['source', src_project, src_package], query=query)
3292 except urllib2.HTTPError, e:
3293 if not return_existing:
3295 msg = ''.join(e.readlines())
3296 msg = msg.split('<summary>')[1]
3297 msg = msg.split('</summary>')[0]
3298 m = re.match(r"branch target package already exists: (\S+)/(\S+)", msg)
3302 return (True, m.group(1), m.group(2), None, None)
3305 for i in ET.fromstring(f.read()).findall('data'):
3306 data[i.get('name')] = i.text
3307 return (False, data.get('targetproject', None), data.get('targetpackage', None),
3308 data.get('sourceproject', None), data.get('sourcepackage', None))
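# Illustrative sketch (not part of the library): branch_pkg() above returns a
# 5-tuple; the first element tells whether an existing branch was reused.
# Names are placeholders.
#
#   exists, tgt_prj, tgt_pkg, src_prj, src_pkg = \
#       branch_pkg(apiurl, 'openSUSE:Factory', 'foo', return_existing=True)
#   # exists is True when the target package already existed (parsed from the
#   # error summary); otherwise the names come from the <data> elements of the
#   # server reply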
3311 def copy_pac(src_apiurl, src_project, src_package,
3312 dst_apiurl, dst_project, dst_package,
3313 client_side_copy = False,
3314 keep_maintainers = False,
3315 keep_develproject = False,
3320 Create a copy of a package.
3322 Copying can be done by downloading the files from one package and committing
3323 them into the other by uploading them (client-side copy) --
3324 or by the server, in a single API call.
3327 src_meta = show_package_meta(src_apiurl, src_project, src_package)
3328 dst_userid = conf.get_apiurl_usr(dst_apiurl)
3329 src_meta = replace_pkg_meta(src_meta, dst_package, dst_project, keep_maintainers,
3330 dst_userid, keep_develproject)
3332 print 'Sending meta data...'
3333 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, '_meta'])
3334 http_PUT(u, data=src_meta)
3336 print 'Copying files...'
3337 if not client_side_copy:
3338 query = {'cmd': 'copy', 'oproject': src_project, 'opackage': src_package }
3340 query['expand'] = '1'
3342 query['orev'] = revision
3344 query['comment'] = comment
3345 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
3350 # copy one file after the other
3352 tmpdir = tempfile.mkdtemp(prefix='osc_copypac')
3354 query = {'rev': 'upload'}
3355 for n in meta_get_filelist(src_apiurl, src_project, src_package, expand=expand):
3357 get_source_file(src_apiurl, src_project, src_package, n, targetfilename=n, revision=revision)
3358 u = makeurl(dst_apiurl, ['source', dst_project, dst_package, pathname2url(n)], query=query)
3359 http_PUT(u, file = n)
3362 query['comment'] = comment
3363 query['cmd'] = 'commit'
3364 u = makeurl(dst_apiurl, ['source', dst_project, dst_package], query=query)
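# A rough usage sketch (illustration only): server-side vs. client-side copy
# with copy_pac() above. All names are placeholders.
#
#   # let the server copy in a single api call (the default)
#   copy_pac(apiurl, 'home:user', 'foo', apiurl, 'home:user:testing', 'foo')
#
#   # download and re-upload each file instead, e.g. between two api servers
#   copy_pac(api_a, 'home:user', 'foo', api_b, 'home:user', 'foo',
#            client_side_copy=True)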
3370 def delete_package(apiurl, prj, pac):
3371 u = makeurl(apiurl, ['source', prj, pac])
3375 def delete_project(apiurl, prj):