- Added a priority option for repos to support installing older packages...
[meego-developer-tools:image-creator.git] mic/utils/misc.py
1 #
2 # misc.py : miscellaneous utilities
3 #
4 # Copyright 2010, Intel Inc.
5 #
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; version 2 of the License.
9 #
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13 # GNU Library General Public License for more details.
14 #
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
18
19
20 import os
21 import sys
22 import subprocess
23 import logging
24 import tempfile
25 import re
26 import shutil
27 import glob
28 import xml.dom.minidom
29 import hashlib
30 import urlparse
31 import locale
32 import codecs
33
34 try:
35     import sqlite3 as sqlite
36 except ImportError:
37     import sqlite
38 import _sqlitecache
39
40 try:
41     from xml.etree import cElementTree
42 except ImportError:
43     import cElementTree
44 xmlparse = cElementTree.parse
45
46 from mic.imgcreate.errors import *
47 from mic.imgcreate.fs import *
48
49 def setlocale():
50     try:
51         locale.setlocale(locale.LC_ALL,'')
52     except locale.Error:
53         os.environ['LC_ALL'] = 'C'
54         locale.setlocale(locale.LC_ALL,'C')
55     sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
56     sys.stdout.errors = 'replace'
57
58 def get_extension_name(path):
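    """Return the file name extension of path without the dot, or None.

    e.g. get_extension_name("meego-netbook.usbimg") -> "usbimg"
    """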
59     match = re.search(r"(?<=\.)\w+$", path)
60     if match:
61         return match.group(0)
62     else:
63         return None
64
65 def get_image_type(path):
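    """Guess the image type of path.

    Directories that look like a MeeGo rootfs return "fs".  Otherwise the
    file extension is mapped (raw, vmdk, vdi, iso -> livecd, usbimg -> liveusb),
    falling back to the VDI header and the output of file(1).  Returns None
    if the type cannot be determined.
    """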
66     if os.path.isdir(path):
67         if ismeego(path):
68             return "fs"
69         return None
70     maptab = {"raw":"raw", "vmdk":"vmdk", "vdi":"vdi", "iso":"livecd", "usbimg":"liveusb"}
71     extension = get_extension_name(path)
72     if extension in maptab:
73         return maptab[extension]
74
75     fd = open(path, "rb")
76     file_header = fd.read(1024)
77     fd.close()
78     vdi_flag = "<<< Sun VirtualBox Disk Image >>>"
79     if file_header[0:len(vdi_flag)] == vdi_flag:
80         return maptab["vdi"]
81
82     dev_null = os.open("/dev/null", os.O_WRONLY)
83     filecmd = find_binary_path("file")
84     args = [ filecmd, path ]
85     file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
86     output = file.communicate()[0]
87     os.close(dev_null)
88     isoptn = re.compile(r".*ISO 9660 CD-ROM filesystem.*(bootable).*")
89     usbimgptn = re.compile(r".*x86 boot sector.*active.*")
90     rawptn = re.compile(r".*x86 boot sector.*")
91     vmdkptn = re.compile(r".*VMware. disk image.*")
92     ext3fsimgptn = re.compile(r".*Linux.*ext3 filesystem data.*")
93     if isoptn.match(output):
94         return maptab["iso"]
95     elif usbimgptn.match(output):
96         return maptab["usbimg"]
97     elif rawptn.match(output):
98         return maptab["raw"]
99     elif vmdkptn.match(output):
100         return maptab["vmdk"]
101     elif ext3fsimgptn.match(output):
102         return "ext3fsimg"
103     else:
104         return None
105
106 def fstype_is_btrfs(img):
107     dev_null = os.open("/dev/null", os.O_WRONLY)
108     filecmd = find_binary_path("file")
109     args = [ filecmd, img ]
110     file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
111     output = file.communicate()[0]
112     os.close(dev_null)
113     btrfslike = re.compile(r".*BTRFS.*")
114     if btrfslike.match(output):
115         return True
116     return False
117
118 def get_file_size(file):
119     """Return size in MB unit"""
120     du = find_binary_path("du")
121     dev_null = os.open("/dev/null", os.O_WRONLY)
122     duProc = subprocess.Popen([du, "-s", "-b", "-B", "1M", file],
123                                stdout=subprocess.PIPE, stderr=dev_null)
124     duOutput = duProc.communicate()[0]
125     if duProc.returncode:
126         raise CreatorError("Failed to run %s" % du)
127
128     size1 = int(duOutput.split()[0])
129     duProc = subprocess.Popen([du, "-s", "-B", "1M", file],
130                                stdout=subprocess.PIPE, stderr=dev_null)
131     duOutput = duProc.communicate()[0]
132     if duProc.returncode:
133         raise CreatorError("Failed to run %s" % du)
134
135     size2 = int(duOutput.split()[0])
136     os.close(dev_null)
137     if size1 > size2:
138         return size1
139     else:
140         return size2
141
142 def get_filesystem_avail(fs):
143     vfstat = os.statvfs(fs)
144     return vfstat.f_bavail * vfstat.f_bsize
145
146 def convert_image(srcimg, srcfmt, dstimg, dstfmt):
147     #convert disk format
148     if dstfmt != "raw":
149         raise CreatorError("Invalid destination image format: %s" % dstfmt)
150     logging.debug("converting %s image to %s" % (srcimg, dstimg))
151     if srcfmt == "vmdk":
152         path = find_binary_path("qemu-img")
153         argv = [path, "convert", "-f", "vmdk", srcimg, "-O", dstfmt,  dstimg]
154     elif srcfmt == "vdi":
155         path = find_binary_path("VBoxManage")
156         argv = [path, "internalcommands", "converttoraw", srcimg, dstimg]
157     else:
158         raise CreatorError("Invalid source image format: %s" % srcfmt)
159
160     rc = subprocess.call(argv)
161     if rc == 0:
162         logging.debug("convert successful")
163     else:
164         raise CreatorError("Unable to convert disk to %s" % dstfmt)
165
166 def myxcopytree(src, dst):
167     dev_null = os.open("/dev/null", os.O_WRONLY)
168     dirnames = os.listdir(src)
169     copycmd = find_binary_path("cp")
170     for dir in dirnames:
171         args = [ copycmd, "-af", src + "/" + dir, dst ]
172         subprocess.call(args, stdout=dev_null, stderr=dev_null)
173     os.close(dev_null)
174     ignores = ["dev/fd", "dev/stdin", "dev/stdout", "dev/stderr", "etc/mtab"]
175     for exclude in ignores:
176         if os.path.exists(dst + "/" + exclude):
177             os.unlink(dst + "/" + exclude)
178
179 def uncompress_squashfs(squashfsimg, outdir):
180     """Uncompress the file system from a squashfs image"""
181     unsquashfs = find_binary_path("unsquashfs")
182     args = [ unsquashfs, "-d", outdir, squashfsimg ]
183     rc = subprocess.call(args)
184     if (rc != 0):
185         raise SquashfsError("Failed to uncompress %s." % squashfsimg)
186
187 def mkdtemp(dir = "/var/tmp", prefix = "mic-tmp-"):
188     makedirs(dir)
189     return tempfile.mkdtemp(dir = dir, prefix = prefix)
190
191 def ismeego(rootdir):
192     ret = False
193     if (os.path.exists(rootdir + "/etc/moblin-release") \
194        or os.path.exists(rootdir + "/etc/meego-release")) \
195        and glob.glob(rootdir + "/boot/vmlinuz-*"):
196         ret = True
197
198     return ret
199
200
201 def is_meego_bootstrap(rootdir):
202     ret = False
203     if (os.path.exists(rootdir + "/etc/moblin-release") \
204        or os.path.exists(rootdir + "/etc/meego-release")) \
205        and os.path.exists(rootdir + "/usr/bin/python") \
206        and os.path.exists(rootdir + "/usr/bin/mic-image-creator"):
207         ret = True
208
209     return ret
210
211
212 _my_proxies = {}
213 _my_noproxy = None
214 _my_noproxy_list = []
215
216 def set_proxy_environ():
217     global _my_noproxy, _my_proxies
218     if not _my_proxies:
219         return
220     for key in _my_proxies.keys():
221         os.environ[key + "_proxy"] = _my_proxies[key]
222     if not _my_noproxy:
223         return
224     os.environ["no_proxy"] = _my_noproxy
225
226 def unset_proxy_environ():
227     for name in ("http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy"):
228         for key in (name, name.upper()):
229             if os.environ.has_key(key):
230                 del os.environ[key]
247
248 def _set_proxies(proxy = None, no_proxy = None):
249     """Set the module-level _my_proxies (scheme -> proxy URL) and _my_noproxy."""
250     global _my_noproxy, _my_proxies
251     _my_proxies = {}
252     _my_noproxy = None
253     proxies = []
254     if proxy:
255        proxies.append(("http_proxy", proxy))
256     if no_proxy:
257        proxies.append(("no_proxy", no_proxy))
258
259     """Get proxy settings from environment variables if not provided"""
260     if not proxy and not no_proxy:
261        proxies = os.environ.items()
262
263        """ Remove proxy env variables, urllib2 can't handle them correctly """
264        unset_proxy_environ()
265
266     for name, value in proxies:
267         name = name.lower()
268         if value and name[-6:] == '_proxy':
269             if name[0:2] != "no":
270                 _my_proxies[name[:-6]] = value
271             else:
272                 _my_noproxy = value
273
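# Convert between dotted-quad strings and 32-bit integers; ip_to_int() is used
# by the no_proxy matching below.  Illustrative round trip:
#   ip_to_int("10.0.0.1")  -> 0x0A000001
#   int_to_ip(0x0A000001)  -> "10.0.0.1"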
274 def ip_to_int(ip):
275     ipint=0
276     shift=24
277     for dec in ip.split("."):
278         ipint |= int(dec) << shift
279         shift -= 8
280     return ipint
281
282 def int_to_ip(val):
283     ipaddr=""
284     shift=0
285     for i in range(4):
286         dec = val >> shift
287         dec &= 0xff
288         ipaddr = ".%d%s" % (dec, ipaddr)
289         shift += 8
290     return ipaddr[1:]
291
292 def isip(host):
293     if host.replace(".", "").isdigit():
294         return True
295     return False
296
297 def set_noproxy_list():
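    """Parse _my_noproxy into _my_noproxy_list.

    Each entry records a match type: 0 = exact host name, 1 = domain
    suffix (items starting with "."), 2 = IP network given as IP/MASK.
    """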
298     global _my_noproxy, _my_noproxy_list
299     _my_noproxy_list = []
300     if not _my_noproxy:
301         return
302     for item in _my_noproxy.split(","):
303         item = item.strip()
304         if not item:
305             continue
306         if item[0] != '.' and item.find("/") == -1:
307             """ Need to match it """
308             _my_noproxy_list.append({"match":0,"needle":item})
309         elif item[0] == '.':
310             """ Need to match at tail """
311             _my_noproxy_list.append({"match":1,"needle":item})
312         elif item.find("/") > 3:
313             """ IP/MASK, need to match at head """
314             needle = item[0:item.find("/")].strip()
315             ip = ip_to_int(needle)
316             netmask = 0
317             mask = item[item.find("/")+1:].strip()
318
319             if mask.isdigit():
320                 netmask = int(mask)
321                 netmask = ~((1<<(32-netmask)) - 1)
322                 ip &= netmask
323             else:
324                 shift=24
325                 netmask=0
326                 for dec in mask.split("."):
327                     netmask |= int(dec) << shift
328                     shift -= 8
329                 ip &= netmask
330             _my_noproxy_list.append({"match":2,"needle":ip,"netmask":netmask})
331
332 def isnoproxy(url):
333     (scheme, host, path, parm, query, frag) = urlparse.urlparse(url)
334     if '@' in host:
335         user_pass, host = host.split('@', 1)
336     if ':' in host:
337         host, port = host.split(':', 1)
338     hostisip = isip(host)
339     for item in _my_noproxy_list:
340         if hostisip and item["match"] <= 1:
341             continue
342         if item["match"] == 2 and hostisip:
343             if (ip_to_int(host) & item["netmask"]) == item["needle"]:
344                 return True
345         if item["match"] == 0:
346             if host == item["needle"]:
347                 return True
348         if item["match"] == 1:
349             if host.endswith(item["needle"]):
350                 return True
351     return False
352
353 def set_proxies(proxy = None, no_proxy = None):
354     _set_proxies(proxy, no_proxy)
355     set_noproxy_list()
356
357 def get_proxy(url):
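    """Return the proxy URL to use for url, or None.

    file URLs and hosts matched by the no_proxy list bypass the proxy;
    otherwise the scheme-specific proxy is returned, falling back to the
    configured http proxy.
    """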
358     if url[0:4] == "file" or isnoproxy(url):
359         return None
360     type = url[0:url.index(":")]
361     proxy = None
362     if _my_proxies.has_key(type):
363         proxy = _my_proxies[type]
364     elif _my_proxies.has_key("http"):
365         proxy = _my_proxies["http"]
366     else:
367         proxy = None
368     return proxy
369
370 def remap_repostr(repostr, siteconf):
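    """Rewrite the baseurl in repostr when an enabled site-config section
    has an "equalto" option matching the repo's name or baseurl."""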
371     items = repostr.split(",")
372     name = None
373     baseurl = None
374     for item in items:
375         subitems = item.split(":")
376         if subitems[0] == "name":
377             name = subitems[1]
378         if subitems[0] == "baseurl":
379             baseurl = item[8:]
380     if not baseurl:
381         baseurl = repostr
382
383     for section in siteconf._sections:
384         if section != "main":
385             if not siteconf.has_option(section, "enabled") or siteconf.get(section, "enabled") == "0":
386                 continue
387             if siteconf.has_option(section, "equalto"):
388                 equalto = siteconf.get(section, "equalto")
389                 if (name and equalto == name) or (baseurl and equalto == baseurl):
390                     remap_baseurl = siteconf.get(section, "baseurl")
391                     repostr = repostr.replace(baseurl, remap_baseurl)
392                     return repostr
393
394     return repostr
395
396
397 def get_temp_reponame(baseurl):
398     md5obj = hashlib.md5(baseurl)
399     tmpreponame = "%s" % md5obj.hexdigest()
400     return tmpreponame
401
402 def get_repostr(repo, siteconf = None):
403     if siteconf:
404         repo = remap_repostr(repo, siteconf)
405     keys = ("baseurl", "mirrorlist", "name", "cost", "includepkgs", "excludepkgs", "proxy", "save", "proxyuser", "proxypasswd", "debuginfo", "source", "gpgkey", "priority")
406     repostr = "repo"
407     items = repo.split(",")
408     if len(items) == 1:
409         subitems = items[0].split(":")
410         if len(subitems) == 1:
411             url = subitems[0]
412             repostr += " --baseurl=%s" % url
413         elif subitems[0] == "baseurl":
414             url = items[0][8:]
415             repostr += " --baseurl=%s" % url
416         elif subitems[0] in ("http", "ftp", "https", "ftps", "file"):
417             url = items[0]
418             repostr += " --baseurl=%s" % url
419         else:
420             raise ValueError("Invalid repo string")
421         if url.find("://") == -1 \
422            or url[0:url.index("://")] not in ("http", "ftp", "https", "ftps", "file") \
423            or url.find("/", url.index("://")+3) == -1:
424             raise ValueError("Invalid repo string")
425     else:
426         if repo.find("baseurl:") == -1 and repo.find("mirrorlist:") == -1:
427             raise ValueError("Invalid repo string")
428         url = None
429         for item in items:
430             if not item:
431                 continue
432             subitems = item.split(":")
433             if subitems[0] in keys:
434                 if subitems[0] in ("baseurl", "mirrorlist"):
435                     url = item[len(subitems[0])+1:]
436                 if subitems[0] in ("save", "debuginfo", "source"):
437                     repostr += " --%s" % subitems[0]
438                 elif subitems[0] in ("includepkgs", "excludepkgs"):
439                     repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:].replace(";", ","))
440                 else:
441                     repostr += " --%s=%s" % (subitems[0], item[len(subitems[0])+1:])
442             else:
443                 raise ValueError("Invalid repo string")
444     if url.find("://") != -1 \
445        and url[0:url.index("://")] in ("http", "ftp", "https", "ftps", "file") \
446        and url.find("/", url.index("://")+3) != -1:
447         if repostr.find("--proxy=") == -1:
448             proxy = get_proxy(url)
449             if proxy:
450                 repostr += " --proxy=%s" % proxy
451     else:
452         raise ValueError("Invalid repo string")
453
454     if repostr.find("--name=") == -1:
455         repostr += " --name=%s" % get_temp_reponame(url)
456
457     return repostr
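# Illustrative use of get_repostr() with a placeholder URL, assuming no proxy
# has been configured via set_proxies() and no site config is given:
#   get_repostr("name:extras,baseurl:http://repo.example.com/extras/")
#     -> 'repo --name=extras --baseurl=http://repo.example.com/extras/'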
458
459 DEFAULT_SITECONF_GLOBAL="/etc/mic2/mic2.conf"
460 DEFAULT_SITECONF_USER=os.path.expanduser("~/.mic2.conf")
461
462 def read_siteconf(siteconf = None):
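    """Read the mic2 site configuration.

    By default /etc/mic2/mic2.conf and ~/.mic2.conf are read, with the user
    file overriding the global one; an explicit siteconf path replaces both.
    """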
463     from ConfigParser import SafeConfigParser
464
465     my_siteconf_parser = SafeConfigParser()
466
467     siteconfigs = [DEFAULT_SITECONF_GLOBAL,DEFAULT_SITECONF_USER]
468
469     if siteconf:
470         """ cmdline siteconfig always overrides all the others. """
471         siteconfigs = [siteconf]
472
473     readsiteconfigs = my_siteconf_parser.read(siteconfigs)
474
475     if readsiteconfigs:
476         print "Read the following site configs: %s" % readsiteconfigs
477
478     return my_siteconf_parser
479
480 def output_siteconf(siteconf):
481     if not siteconf:
482         return None
483
484     output = ""
485     for section in siteconf.sections():
486         output += "[%s]\n" % section
487         for option in siteconf.options(section):
488             output += "%s=%s\n" % (option, siteconf.get(section, option))
489         output += "\n\n"
490
491     return output
492
493 def get_repostrs_from_ks(ks):
494     kickstart_repos = []
495     for repodata in ks.handler.repo.repoList:
496         repostr = ""
497         if hasattr(repodata, "name") and repodata.name:
498             repostr += ",name:" + repodata.name
499         if hasattr(repodata, "baseurl") and repodata.baseurl:
500             repostr += ",baseurl:" + repodata.baseurl
501         if hasattr(repodata, "mirrorlist") and repodata.mirrorlist:
502             repostr += ",mirrorlist:" + repodata.mirrorlist
503         if hasattr(repodata, "includepkgs") and repodata.includepkgs:
504             repostr += ",includepkgs:" + ";".join(repodata.includepkgs)
505         if hasattr(repodata, "excludepkgs") and repodata.excludepkgs:
506             repostr += ",excludepkgs:" + ";".join(repodata.excludepkgs)
507         if hasattr(repodata, "cost") and repodata.cost:
508             repostr += ",cost:%d" % repodata.cost
509         if hasattr(repodata, "save") and repodata.save:
510             repostr += ",save:"
511         if hasattr(repodata, "proxy") and repodata.proxy:
512             repostr += ",proxy:" + repodata.proxy
513         if hasattr(repodata, "proxy_username") and repodata.proxy_username:
514             repostr += ",proxyuser:" + repodata.proxy_username
515         if hasattr(repodata, "proxy_password") and repodata.proxy_password:
516             repostr += ",proxypasswd:" + repodata.proxy_password
517         if repostr.find("name:") == -1:
518             repostr += ",name:%s" % get_temp_reponame(repodata.baseurl)
519         if hasattr(repodata, "debuginfo") and repodata.debuginfo:
520             repostr += ",debuginfo:"
521         if hasattr(repodata, "source") and repodata.source:
522             repostr += ",source:"
523         if hasattr(repodata, "gpgkey") and repodata.gpgkey:
524             repostr += ",gpgkey:" + repodata.gpgkey
525         if hasattr(repodata, "priority") and repodata.priority:
526             repostr += ",priority:%d" % repodata.priority
527         kickstart_repos.append(repostr[1:])
528     return kickstart_repos
529
530 def get_repostrs_from_siteconf(siteconf):
531     site_repos = []
532     if not siteconf:
533         return site_repos
534
535     for section in siteconf._sections:
536         if section != "main":
537             repostr = ""
538             if siteconf.has_option(section, "enabled") \
539                and siteconf.get(section, "enabled") == "1" \
540                and (not siteconf.has_option(section, "equalto") or not siteconf.get(section, "equalto")):
541                 if siteconf.has_option(section, "name") and siteconf.get(section, "name"):
542                     repostr += ",name:%s" % siteconf.get(section, "name")
543                 if siteconf.has_option(section, "baseurl") and siteconf.get(section, "baseurl"):
544                     repostr += ",baseurl:%s" % siteconf.get(section, "baseurl")
545                 if siteconf.has_option(section, "mirrorlist") and siteconf.get(section, "mirrorlist"):
546                     repostr += ",mirrorlist:%s" % siteconf.get(section, "mirrorlist")
547                 if siteconf.has_option(section, "includepkgs") and siteconf.get(section, "includepkgs"):
548                     repostr += ",includepkgs:%s" % siteconf.get(section, "includepkgs").replace(",", ";")
549                 if siteconf.has_option(section, "excludepkgs") and siteconf.get(section, "excludepkgs"):
550                     repostr += ",excludepkgs:%s" % siteconf.get(section, "excludepkgs").replace(",", ";")
551                 if siteconf.has_option(section, "cost") and siteconf.get(section, "cost"):
552                     repostr += ",cost:%s" % siteconf.get(section, "cost")
553                 if siteconf.has_option(section, "priority") and siteconf.get(section, "priority"):
554                     repostr += ",priority:%s" % siteconf.get(section, "priority")
555                 if siteconf.has_option(section, "save") and siteconf.get(section, "save"):
556                     repostr += ",save:"
557                 if siteconf.has_option(section, "proxy") and siteconf.get(section, "proxy"):
558                     repostr += ",proxy:%s" % siteconf.get(section, "proxy")
559                 if siteconf.has_option(section, "proxy_username") and siteconf.get(section, "proxy_username"):
560                     repostr += ",proxyuser:%s" % siteconf.get(section, "proxy_username")
561                 if siteconf.has_option(section, "proxy_password") and siteconf.get(section, "proxy_password"):
562                     repostr += ",proxypasswd:%s" % siteconf.get(section, "proxy_password")
563             if repostr != "":
564                 if repostr.find("name:") == -1:
565                     repostr += ",name:%s" % get_temp_reponame(section)
566                 site_repos.append(repostr[1:])
567     return site_repos
568
569 def get_uncompressed_data_from_url(url, filename, proxies):
570     filename = myurlgrab(url, filename, proxies)
571     suffix = None
572     if filename.endswith(".gz"):
573         suffix = ".gz"
574         gunzip = find_binary_path('gunzip')
575         subprocess.call([gunzip, "-f", filename])
576     elif filename.endswith(".bz2"):
577         suffix = ".bz2"
578         bunzip2 = find_binary_path('bunzip2')
579         subprocess.call([bunzip2, "-f", filename])
580     if suffix:
581         filename = filename[:-len(suffix)]
582     return filename
583
584 def get_metadata_from_repo(baseurl, proxies, cachedir, reponame, filename):
585     url = str(baseurl + "/" + filename)
586     filename_tmp = str("%s/%s/%s" % (cachedir, reponame, os.path.basename(filename)))
587     return get_uncompressed_data_from_url(url,filename_tmp,proxies)
588
589 def get_metadata_from_repos(repostrs, cachedir):
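    """Download the repodata for each repo string into cachedir.

    Fetches repomd.xml plus the primary, patterns, comps and repomd.xml.key
    files and returns a list of dicts with the keys: name, baseurl, repomd,
    primary, cachedir, proxies, patterns, comps and repokey.
    """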
590     if not cachedir:
591         raise CreatorError("No cache dir defined.")
592
593     my_repo_metadata = []
594     for repostr in repostrs:
595         reponame = None
596         baseurl = None
597         proxy = None
598         items = repostr.split(",")
599         for item in items:
600             subitems = item.split(":")
601             if subitems[0] == "name":
602                 reponame = subitems[1]
603             if subitems[0] == "baseurl":
604                 baseurl = item[8:]
605             if subitems[0] == "proxy":
606                 proxy = item[6:]
607             if subitems[0] in ("http", "https", "ftp", "ftps", "file"):
608                 baseurl = item
609         if not proxy:
610             proxy = get_proxy(baseurl)
611         proxies = None
612         if proxy:
613            proxies = {str(proxy.split(":")[0]):str(proxy)}
614         makedirs(cachedir + "/" + reponame)
615         url = str(baseurl + "/repodata/repomd.xml")
616         filename = str("%s/%s/repomd.xml" % (cachedir, reponame))
617         repomd = myurlgrab(url, filename, proxies)
618         try:
619             root = xmlparse(repomd)
620         except SyntaxError:
621             raise CreatorError("repomd.xml syntax error.")
622
623         ns = root.getroot().tag
624         ns = ns[0:ns.rindex("}")+1]
625
626         patterns = None
627         for elm in root.getiterator("%sdata" % ns):
628             if elm.attrib["type"] == "patterns":
629                 patterns = elm.find("%slocation" % ns).attrib['href']
630                 break
631
632         comps = None
633         for elm in root.getiterator("%sdata" % ns):
634             if elm.attrib["type"] == "group_gz":
635                 comps = elm.find("%slocation" % ns).attrib['href']
636                 break
637         if not comps:
638             for elm in root.getiterator("%sdata" % ns):
639                 if elm.attrib["type"] == "group":
640                     comps = elm.find("%slocation" % ns).attrib['href']
641                     break
642
643         primary_type = None
644         for elm in root.getiterator("%sdata" % ns):
645             if elm.attrib["type"] == "primary_db":
646                 primary_type=".sqlite"
647                 break
648
649         if not primary_type:
650             for elm in root.getiterator("%sdata" % ns):
651                 if elm.attrib["type"] == "primary":
652                     primary_type=".xml"
653                     break
654
655         if not primary_type:
656             continue
657
658         primary = elm.find("%slocation" % ns).attrib['href']
659         primary = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, primary)
660
661         if patterns:
662             patterns = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, patterns)
663
664         if comps:
665             comps = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, comps)
666
667         """ Get repo key """
668         try:
669             repokey = get_metadata_from_repo(baseurl, proxies, cachedir, reponame, "repodata/repomd.xml.key")
670         except CreatorError:
671             repokey = None
672             print "Warning: can't get %s/%s" % (baseurl, "repodata/repomd.xml.key")
673
674         my_repo_metadata.append({"name":reponame, "baseurl":baseurl, "repomd":repomd, "primary":primary, "cachedir":cachedir, "proxies":proxies, "patterns":patterns, "comps":comps, "repokey":repokey})
675     return my_repo_metadata
676
677 def get_arch(repometadata):
678     archlist = []
679     for repo in repometadata:
680         if repo["primary"].endswith(".xml"):
681             root = xmlparse(repo["primary"])
682             ns = root.getroot().tag
683             ns = ns[0:ns.rindex("}")+1]
684             for elm in root.getiterator("%spackage" % ns):
685                 if elm.find("%sarch" % ns).text not in ("noarch", "src"):
686                     arch = elm.find("%sarch" % ns).text
687                     if arch not in archlist:
688                         archlist.append(arch)
689         elif repo["primary"].endswith(".sqlite"):
690             con = sqlite.connect(repo["primary"])
691             for row in con.execute("select arch from packages where arch not in (\"src\", \"noarch\")"):
692                 if row[0] not in archlist:
693                     archlist.append(row[0])
694
695             con.close()
696     return archlist
697
698
699 def get_package(pkg, repometadata, arch = None):
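    """Download the newest build of pkg found in repometadata and return the
    local path to the cached .rpm, or None if it cannot be found.

    Note that "newest" is decided by a plain string comparison of
    "version-release", not by full RPM version ordering.
    """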
700     ver = ""
701     target_repo = None
702     for repo in repometadata:
703         if repo["primary"].endswith(".xml"):
704             root = xmlparse(repo["primary"])
705             ns = root.getroot().tag
706             ns = ns[0:ns.rindex("}")+1]
707             for elm in root.getiterator("%spackage" % ns):
708                 if elm.find("%sname" % ns).text == pkg:
709                     if elm.find("%sarch" % ns).text != "src":
710                         version = elm.find("%sversion" % ns)
711                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
712                         if tmpver > ver:
713                             ver = tmpver
714                             location = elm.find("%slocation" % ns)
715                             pkgpath = "%s" % location.attrib['href']
716                             target_repo = repo
717                         break
718         if repo["primary"].endswith(".sqlite"):
719             con = sqlite.connect(repo["primary"])
720             if not arch:
721                 for row in con.execute("select version, release,location_href from packages where name = \"%s\" and arch != \"src\"" % pkg):
722                     tmpver = "%s-%s" % (row[0], row[1])
723                     if tmpver > ver:
724                         pkgpath = "%s" % row[2]
725                         target_repo = repo
726                     break
727             else:
728                 for row in con.execute("select version, release, location_href from packages where name = \"%s\" and arch = \"%s\"" % (pkg, arch)):
729                     tmpver = "%s-%s" % (row[0], row[1])
730                     if tmpver > ver:
731                         pkgpath = "%s" % row[2]
732                         target_repo = repo
733                     break
734             con.close()
735     if target_repo: 
736         makedirs("%s/%s/packages" % (target_repo["cachedir"], target_repo["name"]))
737         url = str(target_repo["baseurl"] + "/" + pkgpath)
738         filename = str("%s/%s/packages/%s" % (target_repo["cachedir"], target_repo["name"], os.path.basename(pkgpath)))
739         pkg = myurlgrab(url, filename, target_repo["proxies"])
740         return pkg
741     else:
742         return None
743
744 def get_source_name(pkg, repometadata):
745
746     def get_bin_name(pkg):
747         m = re.match(r"(.*)\.(.*) (.*)-(.*)", pkg)
748         if m:
749             return m.group(1)
750         return None
751
752     def get_src_name(srpm):
753         m = re.match(r"(.*)-(\d+.*)-(\d+\.\d+)\.src\.rpm", srpm)
754         if m:
755             return m.group(1)
756         return None
757
758     ver = ""
759     target_repo = None
760
761     pkg_name = get_bin_name(pkg)
762     if not pkg_name:
763         return None
764
765     for repo in repometadata:
766         if repo["primary"].endswith(".xml"):
767             root = xmlparse(repo["primary"])
768             ns = root.getroot().tag
769             ns = ns[0:ns.rindex("}")+1]
770             for elm in root.getiterator("%spackage" % ns):
771                 if elm.find("%sname" % ns).text == pkg_name:
772                     if elm.find("%sarch" % ns).text != "src":
773                         version = elm.find("%sversion" % ns)
774                         tmpver = "%s-%s" % (version.attrib['ver'], version.attrib['rel'])
775                         if tmpver > ver:
776                             ver = tmpver
777                             fmt = elm.find("%sformat" % ns)
778                             if fmt is not None:
779                                 fns = fmt.getchildren()[0].tag
780                                 fns = fns[0:fns.rindex("}")+1]
781                                 pkgpath = fmt.find("%ssourcerpm" % fns).text
782                                 target_repo = repo
783                         break
784
785         if repo["primary"].endswith(".sqlite"):
786             con = sqlite.connect(repo["primary"])
787             for row in con.execute("select version, release, rpm_sourcerpm from packages where name = \"%s\" and arch != \"src\"" % pkg_name):
788                 tmpver = "%s-%s" % (row[0], row[1])
789                 if tmpver > ver:
790                     pkgpath = "%s" % row[2]
791                     target_repo = repo
792                 break
793             con.close()
794     if target_repo:
795         return get_src_name(pkgpath)
796     else:
797         return None
798
799 def get_release_no(repometadata, distro="meego"):
800     cpio = find_binary_path("cpio")
801     rpm2cpio = find_binary_path("rpm2cpio")
802     release_pkg = get_package("%s-release" % distro, repometadata)
803     if release_pkg:
804         tmpdir = mkdtemp()
805         oldcwd = os.getcwd()
806         os.chdir(tmpdir)
807         p1 = subprocess.Popen([rpm2cpio, release_pkg], stdout = subprocess.PIPE)
808         p2 = subprocess.Popen([cpio, "-idv"], stdin = p1.stdout, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
809         p2.communicate()
810         f = open("%s/etc/%s-release" % (tmpdir, distro), "r")
811         content = f.read()
812         f.close()
813         os.chdir(oldcwd)
814         shutil.rmtree(tmpdir, ignore_errors = True)
815         return content.split(" ")[2]
816     else:
817         return "UNKNOWN"
818
819 def get_kickstarts_from_repos(repometadata):
820     kickstarts = []
821     for repo in repometadata:
822         try:
823             root = xmlparse(repo["repomd"])
824         except SyntaxError:
825             raise CreatorError("repomd.xml syntax error.")
826
827         ns = root.getroot().tag
828         ns = ns[0:ns.rindex("}")+1]
829
830         for elm in root.getiterator("%sdata" % ns):
831             if elm.attrib["type"] == "image-config":
832                 break
833
834         if elm.attrib["type"] != "image-config":
835             continue
836
837         location = elm.find("%slocation" % ns)
838         image_config = str(repo["baseurl"] + "/" + location.attrib["href"])
839         filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], os.path.basename(location.attrib["href"])))
840
841         image_config = get_uncompressed_data_from_url(image_config,filename,repo["proxies"])
842
843         try:
844             root = xmlparse(image_config)
845         except SyntaxError:
846             raise CreatorError("image-config.xml syntax error.")
847
848         for elm in root.getiterator("config"):
849             path = elm.find("path").text
850             path = path.replace("images-config", "image-config")
851             description = elm.find("description").text
852             makedirs(os.path.dirname("%s/%s/%s" % (repo["cachedir"], repo["name"], path)))
853             url = path
854             if "http" not in path:
855                 url = str(repo["baseurl"] + "/" + path)
856             filename = str("%s/%s/%s" % (repo["cachedir"], repo["name"], path))
857             path = myurlgrab(url, filename, repo["proxies"])
858             kickstarts.append({"filename":path,"description":description})
859     return kickstarts
860
861 def select_ks(ksfiles):
862     print "Available kickstart files:"
863     i = 0
864     for ks in ksfiles:
865         i += 1
866         print "\t%d. %s (%s)" % (i, ks["description"], os.path.basename(ks["filename"]))
867     while True:
868         choice = raw_input("Please input your choice and press ENTER [1..%d] (q to quit): " % i)
869         if choice.lower() == "q":
870             sys.exit(1)
871         if choice.isdigit():
872             choice = int(choice)
873             if choice >= 1 and choice <= i:
874                 break
875
876     return ksfiles[choice-1]["filename"]
877
878
879 def get_pkglist_in_patterns(group, patterns):
880     found = False
881     pkglist = []
882     try:
883         root = xmlparse(patterns)
884     except SyntaxError:
885         raise SyntaxError("%s syntax error." % patterns)
886
887     for elm in list(root.getroot()):
888         ns = elm.tag
889         ns = ns[0:ns.rindex("}")+1]
890         name = elm.find("%sname" % ns)
891         summary = elm.find("%ssummary" % ns)
892         if name.text == group or summary.text == group:
893             found = True
894             break
895
896     if not found:
897         return pkglist
898
899     found = False
900     for requires in list(elm):
901         if requires.tag.endswith("requires"):
902             found = True
903             break
904
905     if not found:
906         return pkglist
907
908     for pkg in list(requires):
909         pkgname = pkg.attrib["name"]
910         if pkgname not in pkglist:
911             pkglist.append(pkgname)
912
913     return pkglist
914
915 def get_pkglist_in_comps(group, comps):
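    """Return the package names of the comps group whose id or name equals
    group, or an empty list if the group is not found."""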
916     found = False
917     pkglist = []
918     try:
919         root = xmlparse(comps)
920     except SyntaxError:
921         raise SyntaxError("%s syntax error." % comps)
922
923     for elm in root.getiterator("group"):
924         id = elm.find("id")
925         name = elm.find("name")
926         if id.text == group or name.text == group:
927             packagelist = elm.find("packagelist")
928             found = True
929             break
930
931     if not found:
932         return pkglist
933
934     for require in elm.getiterator("packagereq"):
935         pkgname = require.text
936         if pkgname not in pkglist:
937             pkglist.append(pkgname)
939
940     return pkglist
941
942 def is_statically_linked(binary):
943     ret = False
944     dev_null = os.open("/dev/null", os.O_WRONLY)
945     filecmd = find_binary_path("file")
946     args = [ filecmd, binary ]
947     file = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=dev_null)
948     output = file.communicate()[0]
949     os.close(dev_null)
950     if output.find(", statically linked, ") > 0:
951         ret = True
952     return ret
953
954 def setup_qemu_emulator(rootdir, arch):
955     # mount binfmt_misc if it doesn't exist
956     if not os.path.exists("/proc/sys/fs/binfmt_misc"):
957         modprobecmd = find_binary_path("modprobe")
958         subprocess.call([modprobecmd, "binfmt_misc"])
959     if not os.path.exists("/proc/sys/fs/binfmt_misc/register"):
960         mountcmd = find_binary_path("mount")
961         subprocess.call([mountcmd, "-t", "binfmt_misc", "none", "/proc/sys/fs/binfmt_misc"])
962
963     # qemu_emulator is a special case, we can't use find_binary_path
964     # qemu emulator should be a statically-linked executable file
965     qemu_emulator = "/usr/bin/qemu-arm"
966     if not os.path.exists(qemu_emulator) or not is_statically_linked(qemu_emulator):
967         qemu_emulator = "/usr/bin/qemu-arm-static"
968     if not os.path.exists(qemu_emulator):
969         raise CreatorError("Please install a statically-linked qemu-arm")
970
971     # qemu_emulator version check
972     arch_list = ["armv7hl", "armv7thl", "armv7nhl", "armv7tnhl"]
973     if arch in arch_list:
974         dev_null = os.open("/dev/null", os.O_WRONLY)
975         pobj = subprocess.Popen([qemu_emulator, "-h"], stdout=subprocess.PIPE, stderr=dev_null)
976         qemuout = pobj.communicate()[0]
977         os.close(dev_null)
978         index = qemuout.find(" version ")
979         qemu_version = qemuout[index+9:index+13]
980         if qemu_version < "0.13":
981             raise CreatorError("Requires %s version >= 0.13 for %s" % (os.path.basename(qemu_emulator), arch))
982
983     if not os.path.exists(rootdir + "/usr/bin"):
984         makedirs(rootdir + "/usr/bin")
985     shutil.copy(qemu_emulator, rootdir + qemu_emulator)
986
987     # disable selinux, selinux will block qemu emulator to run
988     #if os.path.exists("/usr/sbin/setenforce"):
989     #   subprocess.call(["/usr/sbin/setenforce", "0"])
990
991     node = "/proc/sys/fs/binfmt_misc/arm"
992     if is_statically_linked(qemu_emulator) and os.path.exists(node):
993         return qemu_emulator
994
995     # unregister it if it has been registered and is a dynamically-linked executable
996     if not is_statically_linked(qemu_emulator) and os.path.exists(node):
997         qemu_unregister_string = "-1\n"
998         fd = open("/proc/sys/fs/binfmt_misc/arm", "w")
999         fd.write(qemu_unregister_string)
1000         fd.close()
1001
1002     # register qemu emulator for interpreting other arch executable file
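    # The magic/mask pair below is the binfmt_misc registration string for
    # 32-bit little-endian ARM ELF binaries (e_machine 0x28 = EM_ARM).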
1003     if not os.path.exists(node):
1004         qemu_arm_string = ":arm:M::\\x7fELF\\x01\\x01\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x00\\x28\\x00:\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xfa\\xff\\xff\\xff:%s:\n" % qemu_emulator
1005         fd = open("/proc/sys/fs/binfmt_misc/register", "w")
1006         fd.write(qemu_arm_string)
1007         fd.close()
1008
1009     return qemu_emulator
1010
1011 def create_release(config, destdir, name, outimages, release):
1012     """ TODO: This functionality should really be in creator.py inside the
1013     ImageCreator class. """
1014
1015     # For virtual machine images, we have a subdir for it, this is unnecessary
1016     # for release
1017     thatsubdir = None
1018     for i in range(len(outimages)):
1019         file = outimages[i]
1020         if not os.path.isdir(file) and os.path.dirname(file) != destdir:
1021             thatsubdir = os.path.dirname(file)
1022             newfile = os.path.join(destdir, os.path.basename(file))
1023             shutil.move(file, newfile)
1024             outimages[i] = newfile
1025     if thatsubdir:
1026         shutil.rmtree(thatsubdir, ignore_errors = True)
1027
1028     """ Create release directory and files """
1029     os.system ("cp %s %s/%s.ks" % (config, destdir, name))
1030     # When building a release we want to make sure the .ks 
1031     # file generates the same build even when --release= is not used.
1032     fd = open(config, "r")
1033     kscont = fd.read()
1034     fd.close()
1035     kscont = kscont.replace("@BUILD_ID@",release)
1036     fd = open("%s/%s.ks" % (destdir,name), "w")
1037     fd.write(kscont)
1038     fd.close()
1039     outimages.append("%s/%s.ks" % (destdir,name))
1040
1041     # Using system + mv, because of * in filename.
1042     os.system ("mv %s/*-pkgs.txt %s/%s.packages" % (destdir, destdir, name))
1043     outimages.append("%s/%s.packages" % (destdir,name))
1044
1045     d = os.listdir(destdir)
1046     for f in d:
1047         if f.endswith(".iso"):
1048             ff = f.replace(".iso", ".img")
1049             os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
1050             outimages.append("%s/%s" %(destdir, ff))
1051         elif f.endswith(".usbimg"):
1052             ff = f.replace(".usbimg", ".img")
1053             os.rename("%s/%s" %(destdir, f ), "%s/%s" %(destdir, ff))
1054             outimages.append("%s/%s" %(destdir, ff))
1055
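    # Write a MANIFEST of "<md5sum> <filename>" entries for the files in destdir.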
1056     fd = open(destdir + "/MANIFEST", "w")
1057     d = os.listdir(destdir)
1058     for f in d:
1059         if f == "MANIFEST":
1060             continue
1061         if os.path.exists("/usr/bin/md5sum"):
1062             p = subprocess.Popen(["/usr/bin/md5sum", "-b", "%s/%s" %(destdir, f )],
1063                              stdout=subprocess.PIPE)
1064             (md5sum, errorstr) = p.communicate()
1065             if p.returncode != 0:
1066                 logging.warning("Can't generate md5sum for image %s/%s" %(destdir, f ))
1067             else:
1068                 md5sum = md5sum.split(" ")[0]
1069                 fd.write(md5sum+" "+f+"\n")
1070
1071     outimages.append("%s/MANIFEST" % destdir)
1072     fd.close()
1073
1074     """ Update the file list. """
1075     updated_list = []
1076     for file in outimages:
1077         if os.path.exists("%s" % file):
1078             updated_list.append(file)
1079
1080     return updated_list
1081
1082 def get_local_distro():
1083     print "Local linux distribution:"
1084     for file in glob.glob("/etc/*-release"):
1085         fd = open(file, "r")
1086         content = fd.read()
1087         fd.close()
1088         print content
1089     if os.path.exists("/etc/issue"):
1090         fd = open("/etc/issue", "r")
1091         content = fd.read()
1092         fd.close()
1093         print content
1094     print "Local Kernel version: " + os.uname()[2]
1095
1096 def check_mic_installation(argv):
1097     creator_name = os.path.basename(argv[0])
1098     if os.path.exists("/usr/local/bin/" + creator_name) \
1099         and os.path.exists("/usr/bin/" + creator_name):
1100         raise CreatorError("Two mic2 installations were found (in /usr/local/bin and /usr/bin). This can lead to unpredictable errors, because the mic2 binary and the mic2 sources install to different paths on Debian-based distros; please remove one of the two installations.")
1101
1102 def SrcpkgsDownload(pkgs, repometadata, instroot, cachedir):
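    """Fetch the source RPMs for the given binary packages into
    instroot/usr/src/SRPMS.

    Only repos whose name ends in "-source" are searched; source packages
    already present in the cache are copied instead of re-downloaded.
    Returns the list of source packages placed there, or None when no
    source repo is configured.
    """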
1103
1104     def get_source_repometadata(repometadata):
1105         src_repometadata=[]
1106         for repo in repometadata:
1107             if repo["name"].endswith("-source"):
1108                 src_repometadata.append(repo)
1109         if src_repometadata:
1110             return src_repometadata
1111         return None
1112
1113     def get_src_name(srpm):
1114         m = re.match(r"(.*)-(\d+.*)-(\d+\.\d+)\.src\.rpm", srpm)
1115         if m:
1116             return m.group(1)
1117         return None    
1118
1119     src_repometadata = get_source_repometadata(repometadata)
1120
1121     if not src_repometadata:
1122         print "No source repo found"
1123         return None
1124
1125     src_pkgs = []
1126     lpkgs_dict = {}
1127     lpkgs_path = []
1128     for repo in src_repometadata:
1129         cachepath = "%s/%s/packages/*.src.rpm" %(cachedir, repo["name"])
1130         lpkgs_path += glob.glob(cachepath)
1131     
1132     for lpkg in lpkgs_path:
1133         lpkg_name = get_src_name(os.path.basename(lpkg))
1134         lpkgs_dict[lpkg_name] = lpkg
1135     localpkgs = lpkgs_dict.keys()
1136     
1137     cached_count = 0
1138     destdir = instroot+'/usr/src/SRPMS'
1139     if not os.path.exists(destdir):
1140         os.makedirs(destdir)
1141     
1142     srcpkgset = set()
1143     for _pkg in pkgs:
1144         srcpkg_name = get_source_name(_pkg, repometadata)
1145         if not srcpkg_name:
1146             continue
1147         srcpkgset.add(srcpkg_name)
1148     
1149     for pkg in list(srcpkgset):
1150         if pkg in localpkgs:
1151             cached_count += 1
1152             shutil.copy(lpkgs_dict[pkg], destdir)
1153             src_pkgs.append(os.path.basename(lpkgs_dict[pkg]))
1154         else:
1155             src_pkg = get_package(pkg, src_repometadata, 'src')
1156             if src_pkg:
1157                 shutil.copy(src_pkg, destdir)            
1158                 src_pkgs.append(src_pkg)
1159     print '--------------------------------------------------'
1160     print "%d source packages taken from the cache" % cached_count
1161
1162     return src_pkgs