#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of BeRTOS.
#
# Bertos is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
#
# As a special exception, you may use this file as part of a free software
# library without restriction.  Specifically, if other files instantiate
# templates or use macros or inline functions from this file, or you compile
# this file and link it with other files to produce an executable, this
# file does not by itself cause the resulting executable to be covered by
# the GNU General Public License.  This exception does not however
# invalidate any other reasons why the executable file might be covered by
# the GNU General Public License.
#
# Copyright 2008 Develer S.r.l. (http://www.develer.com/)
#
# $Id$
#
# Author: Lorenzo Berni <duplo@develer.com>
#

import os
import fnmatch
import glob
import re
import shutil
# Use custom copytree function
import copytree
import pickle

import const
import plugins
import DefineException
import BProject

from _wizard_version import WIZARD_VERSION

from LoadException import VersionException, ToolchainException

def isBertosDir(directory):
    return os.path.exists(directory + "/VERSION")

def bertosVersion(directory):
    return open(directory + "/VERSION").readline().strip()

def setEnabledModules(project_info, enabled_modules):
    modules = project_info.info("MODULES")
    files = {}
    for module, information in modules.items():
        information["enabled"] = module in enabled_modules
        if information["enabled"]:
            for dependency in information["depends"]:
                if dependency not in modules:
                    files[dependency] = files.get(dependency, 0) + 1
    project_info.setInfo("MODULES", modules)
    project_info.setInfo("FILES", files)

def enabledModules(project_info):
    enabled_modules = []
    for name, module in project_info.info("MODULES").items():
        if module["enabled"]:
            enabled_modules.append(name)
    return enabled_modules

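# Illustrative usage (a sketch, not part of the wizard API): assuming "project"
# is a BProject-like object whose "MODULES" info has already been filled in by
# loadModuleData(), the two helpers above can be used like this:
#
#   setEnabledModules(project, ["kernel", "timer"])
#   enabledModules(project)      # -> ["kernel", "timer"]
#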
def mergeSources(srcdir, new_sources, old_sources):
    # The current mergeSources function provides only a raw copy of the sources into the
    # created project.
    #
    # TODO: implement the three-way merge algorithm
    #
    shutil.rmtree(srcdir, True)
    copytree.copytree(os.path.join(new_sources, "bertos"), srcdir, ignore_list=const.IGNORE_LIST)

def projectFileGenerator(project_info):
    directory = project_info.info("PROJECT_PATH")
    project_data = {}
    enabled_modules = []
    for module, information in project_info.info("MODULES").items():
        if information["enabled"]:
            enabled_modules.append(module)
    project_data["ENABLED_MODULES"] = enabled_modules
    # Use the local BeRTOS version instead of the original one
    # project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
    project_data["SOURCES_PATH"] = directory
    project_data["PROJECT_NAME"] = project_info.info("PROJECT_NAME", os.path.basename(directory))
    project_data["TOOLCHAIN"] = project_info.info("TOOLCHAIN")
    project_data["CPU_NAME"] = project_info.info("CPU_NAME")
    project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
    project_data["OUTPUT"] = project_info.info("OUTPUT")
    project_data["WIZARD_VERSION"] = WIZARD_VERSION
    return pickle.dumps(project_data)

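# The project file written by createBertosProject() is just the pickled dict
# built above; an existing "project.bertos" file can therefore be inspected
# with a plain pickle load (sketch):
#
#   import pickle
#   data = pickle.loads(open("project.bertos", "r").read())
#   data["ENABLED_MODULES"], data["CPU_NAME"], data["WIZARD_VERSION"]
#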
def createBertosProject(project_info, edit=False):
    directory = project_info.info("PROJECT_PATH")
    sources_dir = project_info.info("SOURCES_PATH")
    old_sources_dir = project_info.info("OLD_SOURCES_PATH")
    if not edit:
        if os.path.isdir(directory):
            shutil.rmtree(directory, True)
        os.makedirs(directory)
    # Write the project file
    f = open(directory + "/project.bertos", "w")
    f.write(projectFileGenerator(project_info))
    f.close()
    # VERSION file
    version_file = open(os.path.join(const.DATA_DIR, "vtemplates/VERSION"), "r").read()
    open(directory + "/VERSION", "w").write(versionFileGenerator(project_info, version_file))
    # Destination source dir
    srcdir = directory + "/bertos"
    if not edit:
        # If not in editing mode, copy all the BeRTOS sources into the bertos/ subdirectory of the project
        shutil.rmtree(srcdir, True)
        copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
    elif old_sources_dir:
        # If in editing mode, merge the current BeRTOS sources with the selected ones
        # TODO: implement the three-way merge algorithm
        #
        mergeSources(srcdir, sources_dir, old_sources_dir)
    # Destination makefile
    makefile = open(os.path.join(const.DATA_DIR, "mktemplates/Makefile"), "r").read()
    makefile = makefileGenerator(project_info, makefile)
    open(directory + "/Makefile", "w").write(makefile)
    # Destination project dir
    # prjdir = directory + "/" + os.path.basename(directory)
    prjdir = os.path.join(directory, project_info.info("PROJECT_NAME"))
    if not edit:
        shutil.rmtree(prjdir, True)
        os.mkdir(prjdir)
    # Destination hw files
    hwdir = prjdir + "/hw"
    if not edit:
        shutil.rmtree(hwdir, True)
        os.mkdir(hwdir)
    # Copy all the hw files
    for module, information in project_info.info("MODULES").items():
        for hwfile in information["hw"]:
            string = open(sources_dir + "/" + hwfile, "r").read()
            hwfile_path = hwdir + "/" + os.path.basename(hwfile)
            if not edit or not os.path.exists(hwfile_path):
                # If not in editing mode, copy all the hw files. If in
                # editing mode, copy only the files that don't exist yet
                open(hwfile_path, "w").write(string)
    # Destination configuration files
    cfgdir = prjdir + "/cfg"
    if not edit:
        shutil.rmtree(cfgdir, True)
        os.mkdir(cfgdir)
    # Properly set the autoenabled parameters
    for module, information in project_info.info("MODULES").items():
        if "configuration" in information and information["configuration"] != "":
            configurations = project_info.info("CONFIGURATIONS")
            configuration = configurations[information["configuration"]]
            for start, parameter in configuration["paramlist"]:
                if "type" in configuration[parameter]["informations"] and configuration[parameter]["informations"]["type"] == "autoenabled":
                    configuration[parameter]["value"] = "1" if information["enabled"] else "0"
            project_info.setInfo("CONFIGURATIONS", configurations)
    # Copy all the configuration files
    for configuration, information in project_info.info("CONFIGURATIONS").items():
        string = open(sources_dir + "/" + configuration, "r").read()
        for start, parameter in information["paramlist"]:
            infos = information[parameter]
            value = infos["value"]
            if "unsigned" in infos["informations"] and infos["informations"]["unsigned"]:
                value += "U"
            if "long" in infos["informations"] and infos["informations"]["long"]:
                value += "L"
            string = sub(string, parameter, value)
        f = open(cfgdir + "/" + os.path.basename(configuration), "w")
        f.write(string)
        f.close()
    if not edit:
        # Destination user mk file (only on project creation)
        makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template.mk"), "r").read()
        # Deadly performance loss was here :(
        makefile = userMkGenerator(project_info, makefile)
        open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
    # Destination wizard mk file
    makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template_wiz.mk"), "r").read()
    makefile = mkGenerator(project_info, makefile)
    open(prjdir + "/" + os.path.basename(prjdir) + "_wiz.mk", "w").write(makefile)
    # Destination main.c file
    if not edit:
        main = open(os.path.join(const.DATA_DIR, "srctemplates/main.c"), "r").read()
        open(prjdir + "/main.c", "w").write(main)
    # Files for selected plugins
    relevant_files = {}
    for plugin in project_info.info("OUTPUT"):
        module = loadPlugin(plugin)
        relevant_files[plugin] = module.createProject(project_info)
    project_info.setInfo("RELEVANT_FILES", relevant_files)

def loadPlugin(plugin):
    """
    Returns the given plugin module.
    """
    return getattr(__import__("plugins", {}, {}, [plugin]), plugin)

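# For instance (sketch; the plugin name is only illustrative and must match a
# module available in the plugins package), generating the extra files for an
# IDE plugin looks like:
#
#   module = loadPlugin("codelite")
#   relevant_files = module.createProject(project_info)
#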
def versionFileGenerator(project_info, version_file):
    version = bertosVersion(project_info.info("SOURCES_PATH"))
    return version_file.replace('$version', version)

def userMkGenerator(project_info, makefile):
    mk_data = {}
    mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
    mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
    for key in mk_data:
        while makefile.find(key) != -1:
            makefile = makefile.replace(key, mk_data[key])
    return makefile

def mkGenerator(project_info, makefile):
    """
    Generates the mk file for the current project.
    """
    mk_data = {}
    mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
    mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ")
    cpu_mk_parameters = []
    for key, value in project_info.info("CPU_INFOS").items():
        if key.startswith(const.MK_PARAM_ID):
            cpu_mk_parameters.append("%s = %s" % (key.replace("MK", mk_data["$pname"]), value))
    mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters)
    mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info)
    mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[0])
    mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[1])
    mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
    for key in mk_data:
        while makefile.find(key) != -1:
            makefile = makefile.replace(key, mk_data[key])
    return makefile

def makefileGenerator(project_info, makefile):
    """
    Generate the Makefile for the current project.
    """
    # TODO: write a general function that works for both the mk file and the Makefile
    while makefile.find("$pname") != -1:
        makefile = makefile.replace("$pname", os.path.basename(project_info.info("PROJECT_PATH")))
    return makefile

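# The generators above work by plain textual substitution of $-placeholders in
# the templates under mktemplates/. A minimal sketch of the mechanism (the
# template string is made up):
#
#   template = "TRG += $pname\nSRC = $pname/main.c\n"
#   makefileGenerator(project_info, template)
#   # -> every "$pname" becomes os.path.basename(project_info.info("PROJECT_PATH"))
#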
def csrcGenerator(project_info):
    modules = project_info.info("MODULES")
    files = project_info.info("FILES")
    harvard = "harvard" in project_info.info("CPU_INFOS")["CPU_TAGS"]
    # files to be included in the CSRC variable
    csrc = []
    # files to be included in the PCSRC variable
    pcsrc = []
    # files to be included in the CPPASRC variable
    cppasrc = []
    # files to be included in the CXXSRC variable
    cxxsrc = []
    # files to be included in the ASRC variable
    asrc = []
    # constants to be included at the beginning of the makefile
    constants = {}
    for module, information in modules.items():
        module_files = set([])
        dependency_files = set([])
        # assembly sources
        asm_files = set([])
        hwdir = os.path.basename(project_info.info("PROJECT_PATH")) + "/hw"
        if information["enabled"]:
            if "constants" in information:
                constants.update(information["constants"])
            cfiles, sfiles = findModuleFiles(module, project_info)
            module_files |= set(cfiles)
            asm_files |= set(sfiles)
            for file in information["hw"]:
                if file.endswith(".c"):
                    module_files |= set([hwdir + "/" + os.path.basename(file)])
            for file_dependency in information["depends"] + tuple(files.keys()):
                dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info)
                dependency_files |= set(dependencyCFiles)
                asm_files |= set(dependencySFiles)
            for file in module_files:
                if not harvard or information.get("harvard", "both") == "both":
                    csrc.append(file)
                if harvard and "harvard" in information:
                    pcsrc.append(file)
            for file in dependency_files:
                csrc.append(file)
            for file in project_info.info("CPU_INFOS")["C_SRC"]:
                csrc.append(file)
            for file in project_info.info("CPU_INFOS")["PC_SRC"]:
                pcsrc.append(file)
            for file in asm_files:
                cppasrc.append(file)
    for file in project_info.info("CPU_INFOS")["CPPA_SRC"]:
        cppasrc.append(file)
    for file in project_info.info("CPU_INFOS")["CXX_SRC"]:
        cxxsrc.append(file)
    for file in project_info.info("CPU_INFOS")["ASRC"]:
        asrc.append(file)
    csrc = set(csrc)
    csrc = " \\\n\t".join(csrc) + " \\"
    pcsrc = set(pcsrc)
    pcsrc = " \\\n\t".join(pcsrc) + " \\"
    cppasrc = set(cppasrc)
    cppasrc = " \\\n\t".join(cppasrc) + " \\"
    cxxsrc = set(cxxsrc)
    cxxsrc = " \\\n\t".join(cxxsrc) + " \\"
    asrc = set(asrc)
    asrc = " \\\n\t".join(asrc) + " \\"
    constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + unicode(value) for key, value in constants.items()])
    return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants

def findModuleFiles(module, project_info):
    # Find the files related to the selected module
    cfiles = []
    sfiles = []
    # .c files related to the module and the cpu architecture
    for filename, path in project_info.searchFiles(module + ".c") + \
            project_info.searchFiles(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".c"):
        path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
        path = replaceSeparators(path)
        cfiles.append(path + "/" + filename)
    # .s files related to the module and the cpu architecture
    for filename, path in project_info.searchFiles(module + ".s") + \
            project_info.searchFiles(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s") + \
            project_info.searchFiles(module + ".S") + \
            project_info.searchFiles(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S"):
        path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
        path = replaceSeparators(path)
        sfiles.append(path + "/" + filename)
    # .c and .s files related to the module and the cpu tags
    for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
        for filename, path in project_info.searchFiles(module + "_" + tag + ".c"):
            path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
            path = replaceSeparators(path)
            cfiles.append(path + "/" + filename)
        for filename, path in project_info.searchFiles(module + "_" + tag + ".s") + \
                project_info.searchFiles(module + "_" + tag + ".S"):
            path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
            path = replaceSeparators(path)
            sfiles.append(path + "/" + filename)
    return cfiles, sfiles

def replaceSeparators(path):
    """
    Replace the separators in the given path with the Unix standard separator ("/").
    """
    if os.sep != "/":
        path = path.replace(os.sep, "/")
    return path

def getSystemPath():
    path = os.environ["PATH"]
    if os.name == "nt":
        path = path.split(";")
    else:
        path = path.split(":")
    return path

def findToolchains(path_list):
    toolchains = []
    for element in path_list:
        for toolchain in glob.glob(element + "/" + const.GCC_NAME):
            toolchains.append(toolchain)
    return list(set(toolchains))

def getToolchainInfo(output):
    info = {}
    expr = re.compile("Target: .*")
    target = expr.findall(output)
    if len(target) == 1:
        info["target"] = target[0].split("Target: ")[1]
    expr = re.compile("gcc version [0-9,.]*")
    version = expr.findall(output)
    if len(version) == 1:
        info["version"] = version[0].split("gcc version ")[1]
    expr = re.compile(r"gcc version [0-9,.]* \(.*\)")
    build = expr.findall(output)
    if len(build) == 1:
        build = build[0].split("gcc version ")[1]
        build = build[build.find("(") + 1 : build.find(")")]
        info["build"] = build
    expr = re.compile("Configured with: .*")
    configured = expr.findall(output)
    if len(configured) == 1:
        info["configured"] = configured[0].split("Configured with: ")[1]
    expr = re.compile("Thread model: .*")
    thread = expr.findall(output)
    if len(thread) == 1:
        info["thread"] = thread[0].split("Thread model: ")[1]
    return info

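# The parser above expects the kind of banner printed by "gcc -v"; as a sketch
# (the values are made up), a string such as the following would yield target
# "arm-none-eabi", version "4.3.3", build "GCC", the configure flags and the
# thread model:
#
#   output = ("Target: arm-none-eabi\n"
#             "Configured with: ../configure --target=arm-none-eabi\n"
#             "Thread model: single\n"
#             "gcc version 4.3.3 (GCC)\n")
#   getToolchainInfo(output)
#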
def getToolchainName(toolchain_info):
    name = "GCC " + toolchain_info["version"] + " - " + toolchain_info["target"].strip()
    return name

def loadCpuInfos(project_info):
    cpuInfos = []
    for definition in project_info.findDefinitions(const.CPU_DEFINITION):
        cpuInfos.append(getInfos(definition))
    return cpuInfos

def getTagSet(cpu_info):
    tag_set = set([])
    for cpu in cpu_info:
        tag_set |= set([cpu["CPU_NAME"]])
        tag_set |= set(cpu["CPU_TAGS"])
        tag_set |= set([cpu["TOOLCHAIN"]])
    return tag_set

def getInfos(definition):
    D = {}
    D.update(const.CPU_DEF)
    def include(filename, dict=D, directory=definition[1]):
        execfile(directory + "/" + filename, {}, D)
    D["include"] = include
    include(definition[0], D)
    D["CPU_NAME"] = definition[0].split(".")[0]
    D["DEFINITION_PATH"] = definition[1] + "/" + definition[0]
    del D["include"]
    return D

def getCommentList(string):
    comment_list = re.findall(r"/\*{2}\s*([^*]*\*(?:[^/*][^*]*\*+)*)/", string)
    comment_list = [re.findall(r"^\s*\* *(.*?)$", comment, re.MULTILINE) for comment in comment_list]
    return comment_list

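# As a sketch of the intended input (the module name is made up), a header
# containing
#
#   /**
#    * \brief Kernel scheduler.
#    *
#    * $WIZ$ module_name = "kernel"
#    */
#
# would make getCommentList() return one list per /** ... */ comment, here
# roughly ["\brief Kernel scheduler.", "", '$WIZ$ module_name = "kernel"', ""]
# (the comment lines with the leading "*" decoration stripped).
#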
def loadModuleDefinition(first_comment):
    to_be_parsed = False
    module_definition = {}
    for num, line in enumerate(first_comment):
        index = line.find("$WIZ$")
        if index != -1:
            to_be_parsed = True
            try:
                exec line[index + len("$WIZ$ "):] in {}, module_definition
            except:
                raise ParseError(num, line[index:])
        elif line.find("\\brief") != -1:
            module_definition["module_description"] = line[line.find("\\brief") + len("\\brief "):]
    module_dict = {}
    if "module_name" in module_definition:
        module_name = module_definition[const.MODULE_DEFINITION["module_name"]]
        del module_definition[const.MODULE_DEFINITION["module_name"]]
        module_dict[module_name] = {}
        if const.MODULE_DEFINITION["module_depends"] in module_definition:
            depends = module_definition[const.MODULE_DEFINITION["module_depends"]]
            del module_definition[const.MODULE_DEFINITION["module_depends"]]
            if type(depends) == str:
                depends = (depends,)
            module_dict[module_name]["depends"] = depends
        else:
            module_dict[module_name]["depends"] = ()
        if const.MODULE_DEFINITION["module_configuration"] in module_definition:
            module_dict[module_name]["configuration"] = module_definition[const.MODULE_DEFINITION["module_configuration"]]
            del module_definition[const.MODULE_DEFINITION["module_configuration"]]
        else:
            module_dict[module_name]["configuration"] = ""
        if "module_description" in module_definition:
            module_dict[module_name]["description"] = module_definition["module_description"]
            del module_definition["module_description"]
        if const.MODULE_DEFINITION["module_harvard"] in module_definition:
            harvard = module_definition[const.MODULE_DEFINITION["module_harvard"]]
            module_dict[module_name]["harvard"] = harvard
            del module_definition[const.MODULE_DEFINITION["module_harvard"]]
        if const.MODULE_DEFINITION["module_hw"] in module_definition:
            hw = module_definition[const.MODULE_DEFINITION["module_hw"]]
            del module_definition[const.MODULE_DEFINITION["module_hw"]]
            if type(hw) == str:
                hw = (hw, )
            module_dict[module_name]["hw"] = hw
        else:
            module_dict[module_name]["hw"] = ()
        if const.MODULE_DEFINITION["module_supports"] in module_definition:
            supports = module_definition[const.MODULE_DEFINITION["module_supports"]]
            del module_definition[const.MODULE_DEFINITION["module_supports"]]
            module_dict[module_name]["supports"] = supports
        module_dict[module_name]["constants"] = module_definition
        module_dict[module_name]["enabled"] = False
    return to_be_parsed, module_dict

def isSupported(project, module=None, property_id=None):
    if not module and property_id:
        item = project.info("CONFIGURATIONS")[property_id[0]][property_id[1]]["informations"]
    else:
        item = project.info("MODULES")[module]
    tag_dict = project.info("ALL_CPU_TAGS")
    if "supports" in item:
        support_string = item["supports"]
        supported = {}
        try:
            exec "supported = " + support_string in tag_dict, supported
        except:
            raise SupportedException(support_string)
        return supported["supported"]
    else:
        return True

def loadDefineLists(comment_list):
    define_list = {}
    for comment in comment_list:
        for num, line in enumerate(comment):
            index = line.find("$WIZ$")
            if index != -1:
                try:
                    exec line[index + len("$WIZ$ "):] in {}, define_list
                except:
                    raise ParseError(num, line[index:])
    for key, value in define_list.items():
        if type(value) == str:
            define_list[key] = (value,)
    return define_list

def getDescriptionInformations(comment):
    """
    Take a doxygen comment and strip the wizard information from it, returning
    the tuple (brief, description, information).
    """
    brief = ""
    description = ""
    information = {}
    for num, line in enumerate(comment):
        index = line.find("$WIZ$")
        if index != -1:
            if len(brief) == 0:
                brief += line[:index].strip()
            else:
                description += " " + line[:index]
            try:
                exec line[index + len("$WIZ$ "):] in {}, information
            except:
                raise ParseError(num, line[index:])
        else:
            if len(brief) == 0:
                brief += line.strip()
            else:
                description += " " + line
                description = description.strip()
    return brief.strip(), description.strip(), information

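# For example (sketch, made-up parameter), fed the comment lines
#   ["Kernel heartbeat period.", "Milliseconds between two ticks.",
#    '$WIZ$ type = "int"']
# the function would return roughly
#   ("Kernel heartbeat period.", "Milliseconds between two ticks.", {"type": "int"})
#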
def getDefinitionBlocks(text):
    """
    Take a text and return a list of tuples (comment, define, start), where
    comment is the list of comment lines, define the name-value string of the
    #define and start its offset in the text.
    """
    block = []
    block_tmp = re.finditer(r"/\*{2}\s*([^*]*\*(?:[^/*][^*]*\*+)*)/\s*#define\s+((?:[^/]*?/?)+)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE)
    for match in block_tmp:
        # Only the first element is needed
        comment = match.group(1)
        define = match.group(2)
        start = match.start()
        block.append(([re.findall(r"^\s*\* *(.*?)$", line, re.MULTILINE)[0] for line in comment.splitlines()], define, start))
    for match in re.finditer(r"/{3}\s*([^<].*?)\s*#define\s+((?:[^/]*?/?)+)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE):
        comment = match.group(1)
        define = match.group(2)
        start = match.start()
        block.append(([comment], define, start))
    for match in re.finditer(r"#define\s*(.*?)\s*/{3}<\s*(.+?)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE):
        comment = match.group(2)
        define = match.group(1)
        start = match.start()
        block.append(([comment], define, start))
    return block

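# A sketch of the kind of block matched by the first pattern (a typical
# cfg_*.h entry; the name and value are made up):
#
#   /**
#    * Kernel heartbeat period [ms].
#    * $WIZ$ type = "int"
#    */
#   #define CONFIG_KERN_HEARTBEAT  1000
#
# which yields roughly
#   (["Kernel heartbeat period [ms].", '$WIZ$ type = "int"', ""],
#    "CONFIG_KERN_HEARTBEAT  1000", <offset of the match>)
#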
def loadModuleData(project_info, edit=False):
    module_info_dict = {}
    list_info_dict = {}
    configuration_info_dict = {}
    file_dict = {}
    for filename, path in project_info.findDefinitions("*.h") + project_info.findDefinitions("*.c") + project_info.findDefinitions("*.s") + project_info.findDefinitions("*.S"):
        comment_list = getCommentList(open(path + "/" + filename, "r").read())
        if len(comment_list) > 0:
            module_info = {}
            configuration_info = {}
            try:
                to_be_parsed, module_dict = loadModuleDefinition(comment_list[0])
            except ParseError, err:
                raise DefineException.ModuleDefineException(path, err.line_number, err.line)
            for module, information in module_dict.items():
                if "depends" not in information:
                    information["depends"] = ()
                information["depends"] += (filename.split(".")[0],)
                information["category"] = os.path.basename(path)
                if "configuration" in information and len(information["configuration"]):
                    configuration = module_dict[module]["configuration"]
                    try:
                        configuration_info[configuration] = loadConfigurationInfos(project_info.info("SOURCES_PATH") + "/" + configuration)
                    except ParseError, err:
                        raise DefineException.ConfigurationDefineException(project_info.info("SOURCES_PATH") + "/" + configuration, err.line_number, err.line)
                    if edit:
                        try:
                            project_name = project_info.info("PROJECT_NAME")
                            directory = project_info.info("PROJECT_PATH")
                            user_configuration = loadConfigurationInfos(directory + "/" + configuration.replace("bertos", project_name))
                            configuration_info[configuration] = updateConfigurationValues(configuration_info[configuration], user_configuration)
                        except ParseError, err:
                            raise DefineException.ConfigurationDefineException(directory + "/" + configuration.replace("bertos", project_name))
            module_info_dict.update(module_dict)
            configuration_info_dict.update(configuration_info)
            if to_be_parsed:
                try:
                    list_dict = loadDefineLists(comment_list[1:])
                    list_info_dict.update(list_dict)
                except ParseError, err:
                    raise DefineException.EnumDefineException(path, err.line_number, err.line)
    for filename, path in project_info.findDefinitions("*_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".h"):
        comment_list = getCommentList(open(path + "/" + filename, "r").read())
        list_info_dict.update(loadDefineLists(comment_list))
    for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
        for filename, path in project_info.findDefinitions("*_" + tag + ".h"):
            comment_list = getCommentList(open(path + "/" + filename, "r").read())
            list_info_dict.update(loadDefineLists(comment_list))
    project_info.setInfo("MODULES", module_info_dict)
    project_info.setInfo("LISTS", list_info_dict)
    project_info.setInfo("CONFIGURATIONS", configuration_info_dict)
    project_info.setInfo("FILES", file_dict)

def formatParamNameValue(text):
    """
    Take the given string and return a tuple with the name of the parameter in the first position
    and the value in the second.
    """
    block = re.findall(r"\s*([^\s]+)\s*(.+?)\s*$", text, re.MULTILINE)
    return block[0]

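# E.g. (sketch, made-up define): formatParamNameValue("CONFIG_KERN_HEARTBEAT  1000")
# returns ("CONFIG_KERN_HEARTBEAT", "1000").
#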
def loadConfigurationInfos(path):
    """
    Return the module configurations found in the given file as a dict with the
    parameter name as key and a dict containing the fields below as value:
        "value": the value of the parameter
        "description": the description of the parameter
        "informations": a dict containing optional information:
            "type": "int" | "boolean" | "enum"
            "min": the minimum value for integer parameters
            "max": the maximum value for integer parameters
            "long": boolean indicating if the number is a long
            "unsigned": boolean indicating if the number is an unsigned
            "value_list": the name of the enum for enum parameters
            "conditional_deps": the list of conditional dependencies for boolean parameters
    """
    configuration_infos = {}
    configuration_infos["paramlist"] = []
    for comment, define, start in getDefinitionBlocks(open(path, "r").read()):
        name, value = formatParamNameValue(define)
        brief, description, informations = getDescriptionInformations(comment)
        configuration_infos["paramlist"].append((start, name))
        configuration_infos[name] = {}
        configuration_infos[name]["value"] = value
        configuration_infos[name]["informations"] = informations
        if "type" not in configuration_infos[name]["informations"]:
            configuration_infos[name]["informations"]["type"] = findParameterType(configuration_infos[name])
        if ("type" in configuration_infos[name]["informations"] and
                configuration_infos[name]["informations"]["type"] == "int" and
                configuration_infos[name]["value"].find("L") != -1):
            configuration_infos[name]["informations"]["long"] = True
            configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("L", "")
        if ("type" in configuration_infos[name]["informations"] and
                configuration_infos[name]["informations"]["type"] == "int" and
                configuration_infos[name]["value"].find("U") != -1):
            configuration_infos[name]["informations"]["unsigned"] = True
            configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("U", "")
        if "conditional_deps" in configuration_infos[name]["informations"]:
            if (type(configuration_infos[name]["informations"]["conditional_deps"]) == str or
                    type(configuration_infos[name]["informations"]["conditional_deps"]) == unicode):
                configuration_infos[name]["informations"]["conditional_deps"] = (configuration_infos[name]["informations"]["conditional_deps"], )
            elif type(configuration_infos[name]["informations"]["conditional_deps"]) == tuple:
                pass
            else:
                configuration_infos[name]["informations"]["conditional_deps"] = ()
        configuration_infos[name]["description"] = description
        configuration_infos[name]["brief"] = brief
    return configuration_infos

def updateConfigurationValues(def_conf, user_conf):
    for param in def_conf["paramlist"]:
        if param[1] in user_conf and "value" in user_conf[param[1]]:
            def_conf[param[1]]["value"] = user_conf[param[1]]["value"]
    return def_conf

def findParameterType(parameter):
    if "value_list" in parameter["informations"]:
        return "enum"
    if "min" in parameter["informations"] or "max" in parameter["informations"] or re.match(r"^\d+U?L?$", parameter["value"]) is not None:
        return "int"

def sub(string, parameter, value):
    """
    Substitute the given value into the #define of the given parameter in the given string.
    """
    return re.sub(r"(?P<define>#define\s+" + parameter + r"\s+)([^\s]+)", r"\g<define>" + value, string)

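# E.g. (sketch, made-up define):
#   sub('#define CONFIG_KERN_HEARTBEAT  1000', "CONFIG_KERN_HEARTBEAT", "500")
# returns '#define CONFIG_KERN_HEARTBEAT  500'.
#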
def isInt(informations):
    """
    Return True if the value is a simple int (neither long nor unsigned).
    """
    if ("long" not in informations or not informations["long"]) and ("unsigned" not in informations or not informations["unsigned"]):
        return True
    else:
        return False

def isLong(informations):
    """
    Return True if the value is a long.
    """
    if "long" in informations and informations["long"] and "unsigned" not in informations:
        return True
    else:
        return False

def isUnsigned(informations):
    """
    Return True if the value is an unsigned.
    """
    if "unsigned" in informations and informations["unsigned"] and "long" not in informations:
        return True
    else:
        return False

def isUnsignedLong(informations):
    """
    Return True if the value is an unsigned long.
    """
    if "unsigned" in informations and "long" in informations and informations["unsigned"] and informations["long"]:
        return True
    else:
        return False

class ParseError(Exception):
    def __init__(self, line_number, line):
        Exception.__init__(self)
        self.line_number = line_number
        self.line = line

class SupportedException(Exception):
    def __init__(self, support_string):
        Exception.__init__(self)
        self.support_string = support_string