4 # This file is part of BeRTOS.
6 # Bertos is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 # As a special exception, you may use this file as part of a free software
21 # library without restriction. Specifically, if other files instantiate
22 # templates or use macros or inline functions from this file, or you compile
23 # this file and link it with other files to produce an executable, this
24 # file does not by itself cause the resulting executable to be covered by
25 # the GNU General Public License. This exception does not however
26 # invalidate any other reasons why the executable file might be covered by
27 # the GNU General Public License.
29 # Copyright 2008 Develer S.r.l. (http://www.develer.com/)
33 # Author: Lorenzo Berni <duplo@develer.com>
41 # Use custom copytree function
47 import DefineException
50 from LoadException import VersionException, ToolchainException
def isBertosDir(directory):
    """Return True when *directory* looks like a BeRTOS source tree.

    The heuristic is simply the presence of a top-level VERSION file.
    """
    version_marker = directory + "/VERSION"
    return os.path.exists(version_marker)
def bertosVersion(directory):
    """Return the BeRTOS version string found in *directory*.

    Reads the first line of the VERSION file and strips surrounding
    whitespace.
    """
    # Fix: the original opened the file without ever closing it, leaking
    # the handle until garbage collection; a context manager closes it
    # deterministically.
    with open(directory + "/VERSION") as version_file:
        return version_file.readline().strip()
58 def loadBertosProject(project_file, info_dict):
# Load a saved wizard project (a pickled dict) and rebuild a full
# BProject from it: sources path, CPU infos, tag set, toolchain,
# clock frequency, output plugins and enabled modules.
# WARNING: pickle.loads on a project file executes arbitrary code if the
# file is untrusted.
# NOTE(review): several lines (else branches, loop headers, the final
# return) are not visible in this extract; comments hedge accordingly.
59 project_data = pickle.loads(open(project_file, "r").read())
60 project_info = BProject.BProject()
61 project_info.setInfo("PROJECT_PATH", os.path.dirname(project_file))
# The caller may override the stored sources path via info_dict.
62 if "SOURCES_PATH" in info_dict:
63 project_data["SOURCES_PATH"] = info_dict["SOURCES_PATH"]
64 if os.path.exists(project_data["SOURCES_PATH"]):
65 project_info.setInfo("SOURCES_PATH", project_data["SOURCES_PATH"])
# Sources directory vanished: signal the mismatch so the GUI can ask for
# a new BeRTOS version (raised from the missing else branch).
67 raise VersionException(project_info)
68 loadSourceTree(project_info)
69 cpu_name = project_data["CPU_NAME"]
70 project_info.setInfo("CPU_NAME", cpu_name)
71 cpu_info = loadCpuInfos(project_info)
# Pick the infos of the selected CPU (the enclosing loop over cpu_info
# is not visible in this extract).
73 if cpu["CPU_NAME"] == cpu_name:
74 project_info.setInfo("CPU_INFOS", cpu)
76 tag_list = getTagSet(cpu_info)
77 # Create, fill and store the dict with the tags
79 for element in tag_list:
80 tag_dict[element] = False
81 infos = project_info.info("CPU_INFOS")
# Tags belonging to the selected CPU/toolchain are switched to True.
83 if tag in infos["CPU_TAGS"] + [infos["CPU_NAME"], infos["TOOLCHAIN"]]:
87 project_info.setInfo("ALL_CPU_TAGS", tag_dict)
# The caller may override the stored toolchain via info_dict.
88 if "TOOLCHAIN" in info_dict:
89 project_data["TOOLCHAIN"] = info_dict["TOOLCHAIN"]
90 if os.path.exists(project_data["TOOLCHAIN"]["path"]):
91 project_info.setInfo("TOOLCHAIN", project_data["TOOLCHAIN"])
# Toolchain binary missing: let the GUI ask for a replacement.
93 raise ToolchainException(project_info)
94 project_info.setInfo("SELECTED_FREQ", project_data["SELECTED_FREQ"])
95 project_info.setInfo("OUTPUT", project_data["OUTPUT"])
96 loadModuleData(project_info, True)
97 setEnabledModules(project_info, project_data["ENABLED_MODULES"])
100 def setEnabledModules(project_info, enabled_modules):
# Flag each module enabled/disabled according to enabled_modules and
# recompute the FILES reference counts for dependencies that are plain
# source files rather than modules.
# NOTE(review): the initialization of 'files' (a dict) and the 'else'
# branch before line 110 are not visible in this extract.
101 modules = project_info.info("MODULES")
103 for module, information in modules.items():
104 information["enabled"] = module in enabled_modules
105 for dependency in information["depends"]:
# A dependency that is not itself a module is a bare file: count how
# many modules reference it.
106 if not dependency in modules:
107 if dependency in files:
108 files[dependency] += 1
110 files[dependency] = 1
111 project_info.setInfo("MODULES", modules)
112 project_info.setInfo("FILES", files)
114 def enabledModules(project_info):
# Return the list of names of the currently enabled modules.
# NOTE(review): the 'enabled_modules = []' initializer is not visible in
# this extract.
116 for name, module in project_info.info("MODULES").items():
117 if module["enabled"]:
118 enabled_modules.append(name)
119 return enabled_modules
121 def mergeSources(srcdir, new_sources, old_sources):
122 # The current mergeSources function provide only a raw copy of the sources in the
125 # TODO: implement the three way merge algorithm
# Currently this just wipes srcdir and copies the new bertos/ tree over
# it; old_sources is accepted for the future three-way merge but unused.
# shutil.rmtree(..., True) ignores errors (e.g. srcdir missing).
127 shutil.rmtree(srcdir, True)
128 copytree.copytree(os.path.join(new_sources, "bertos"), srcdir, ignore_list=const.IGNORE_LIST)
130 def projectFileGenerator(project_info):
# Serialize the project state (enabled modules, sources path, toolchain,
# CPU, frequency, output plugins) into a pickled dict suitable for the
# project.bertos file.
# NOTE(review): the 'project_data = {}' and 'enabled_modules = []'
# initializers are not visible in this extract.
131 directory = project_info.info("PROJECT_PATH")
134 for module, information in project_info.info("MODULES").items():
135 if information["enabled"]:
136 enabled_modules.append(module)
137 project_data["ENABLED_MODULES"] = enabled_modules
138 # Use the local BeRTOS version instead of the original one
139 # project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
140 project_data["SOURCES_PATH"] = directory
141 project_data["TOOLCHAIN"] = project_info.info("TOOLCHAIN")
142 project_data["CPU_NAME"] = project_info.info("CPU_NAME")
143 project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
144 project_data["OUTPUT"] = project_info.info("OUTPUT")
145 return pickle.dumps(project_data)
147 def createBertosProject(project_info, edit=False):
# Materialize the project on disk: project file, VERSION, the bertos/
# source copy, Makefile, per-project mk files, hw/ and cfg/ files,
# main.c and plugin outputs. When edit=True, existing user files are
# preserved where noted below.
# NOTE(review): many lines (several 'if not edit:' guards, os.mkdir
# calls, 'else' branches, initializers) are not visible in this extract.
148 directory = project_info.info("PROJECT_PATH")
149 sources_dir = project_info.info("SOURCES_PATH")
150 old_sources_dir = project_info.info("OLD_SOURCES_PATH")
152 if os.path.isdir(directory):
153 shutil.rmtree(directory, True)
154 os.makedirs(directory)
155 # Write the project file
156 f = open(directory + "/project.bertos", "w")
157 f.write(projectFileGenerator(project_info))
# Write the VERSION file from its template.
160 version_file = open(os.path.join(const.DATA_DIR, "vtemplates/VERSION"), "r").read()
161 open(directory + "/VERSION", "w").write(versionFileGenerator(project_info, version_file))
162 # Destination source dir
163 srcdir = directory + "/bertos"
165 # If not in editing mode it copies all the bertos sources in the /bertos subdirectory of the project
166 shutil.rmtree(srcdir, True)
167 copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
168 elif old_sources_dir:
169 # If in editing mode it merges the current bertos sources with the selected ones
170 # TODO: implement the three way merge algorithm
172 mergeSources(srcdir, sources_dir, old_sources_dir)
173 # Destination makefile
174 makefile = directory + "/Makefile"
175 makefile = open(os.path.join(const.DATA_DIR, "mktemplates/Makefile"), 'r').read()
176 makefile = makefileGenerator(project_info, makefile)
177 open(directory + "/Makefile", "w").write(makefile)
178 # Destination project dir
179 prjdir = directory + "/" + os.path.basename(directory)
181 shutil.rmtree(prjdir, True)
183 # Destination hw files
184 hwdir = prjdir + "/hw"
186 shutil.rmtree(hwdir, True)
188 # Copy all the hw files
189 for module, information in project_info.info("MODULES").items():
190 for hwfile in information["hw"]:
191 string = open(sources_dir + "/" + hwfile, "r").read()
192 hwfile_path = hwdir + "/" + os.path.basename(hwfile)
193 if not edit or not os.path.exists(hwfile_path):
194 # If not in editing mode it copies all the hw files. If in
195 # editing mode it copies only the files that don't exist yet
196 open(hwdir + "/" + os.path.basename(hwfile), "w").write(string)
197 # Destination configurations files
198 cfgdir = prjdir + "/cfg"
200 shutil.rmtree(cfgdir, True)
202 # Set properly the autoenabled parameters
203 for module, information in project_info.info("MODULES").items():
204 if "configuration" in information and information["configuration"] != "":
205 configurations = project_info.info("CONFIGURATIONS")
206 configuration = configurations[information["configuration"]]
# 'autoenabled' parameters mirror the module's enabled state.
207 for start, parameter in configuration["paramlist"]:
208 if "type" in configuration[parameter]["informations"] and configuration[parameter]["informations"]["type"] == "autoenabled":
209 configuration[parameter]["value"] = "1" if information["enabled"] else "0"
210 project_info.setInfo("CONFIGURATIONS", configurations)
211 # Copy all the configuration files
212 for configuration, information in project_info.info("CONFIGURATIONS").items():
213 string = open(sources_dir + "/" + configuration, "r").read()
214 for start, parameter in information["paramlist"]:
215 infos = information[parameter]
216 value = infos["value"]
# NOTE(review): the bodies re-appending the 'U'/'L' suffixes to the
# value are not visible in this extract.
217 if "unsigned" in infos["informations"] and infos["informations"]["unsigned"]:
219 if "long" in infos["informations"] and infos["informations"]["long"]:
221 string = sub(string, parameter, value)
222 f = open(cfgdir + "/" + os.path.basename(configuration), "w")
226 # Destination user mk file (only on project creation)
227 makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template.mk"), "r").read()
228 makefile = mkGenerator(project_info, makefile)
229 open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
230 # Destination wizard mk file
231 makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template_wiz.mk"), "r").read()
232 makefile = mkGenerator(project_info, makefile)
233 open(prjdir + "/" + os.path.basename(prjdir) + "_wiz.mk", "w").write(makefile)
234 # Destination main.c file
236 main = open(os.path.join(const.DATA_DIR, "srctemplates/main.c"), "r").read()
237 open(prjdir + "/main.c", "w").write(main)
238 # Files for selected plugins
# NOTE(review): the 'relevants_files = {}' initializer is not visible.
240 for plugin in project_info.info("OUTPUT"):
241 module = loadPlugin(plugin)
242 relevants_files[plugin] = module.createProject(project_info)
243 project_info.setInfo("RELEVANT_FILES", relevants_files)
245 def loadPlugin(plugin):
# NOTE(review): the triple-quote delimiters around the next docstring
# line are not visible in this extract.
247 Returns the given plugin module.
# Imports plugins.<plugin> and returns the submodule object itself.
249 return getattr(__import__("plugins", {}, {}, [plugin]), plugin)
def versionFileGenerator(project_info, version_file):
    """Return *version_file* with the '$version' placeholder replaced.

    The substituted value is the BeRTOS version string read from the
    project's sources directory.
    """
    sources_path = project_info.info("SOURCES_PATH")
    return version_file.replace('$version', bertosVersion(sources_path))
255 def mkGenerator(project_info, makefile):
257 Generates the mk file for the current project.
# NOTE(review): the docstring delimiters, the 'mk_data = {}' initializer,
# the 'for key in mk_data:' loop header and the final 'return makefile'
# are not visible in this extract.
260 mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
261 mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ")
262 cpu_mk_parameters = []
263 for key, value in project_info.info("CPU_INFOS").items():
# CPU makefile parameters are namespaced by replacing the "MK" prefix
# with the project name.
264 if key.startswith(const.MK_PARAM_ID):
265 cpu_mk_parameters.append("%s = %s" %(key.replace("MK", mk_data["$pname"]), value))
266 mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters)
267 mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info)
# The toolchain path is split around "gcc" to get prefix/suffix parts.
268 mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[0])
269 mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[1])
270 mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
# Replace every occurrence of each placeholder in the template.
272 while makefile.find(key) != -1:
273 makefile = makefile.replace(key, mk_data[key])
276 def makefileGenerator(project_info, makefile):
278 Generate the Makefile for the current project.
# NOTE(review): the docstring delimiters and the final 'return makefile'
# are not visible in this extract. Only the $pname placeholder is
# substituted here.
280 # TODO write a general function that works for both the mk file and the Makefile
281 while makefile.find("$pname") != -1:
282 makefile = makefile.replace("$pname", os.path.basename(project_info.info("PROJECT_PATH")))
285 def csrcGenerator(project_info):
# Build the makefile source lists (CSRC, PCSRC, CPPASRC, CXXSRC, ASRC)
# and the constants block from the enabled modules, their dependencies,
# the hw files and the CPU-specific sources.
# NOTE(review): many lines are not visible in this extract: the list
# initializers (csrc, pcsrc, ...), the 'harvard = True/False' branches,
# the append statements inside the trailing loops, and the else branch.
286 modules = project_info.info("MODULES")
287 files = project_info.info("FILES")
# Harvard-architecture CPUs split sources between CSRC and PCSRC.
288 if "harvard" in project_info.info("CPU_INFOS")["CPU_TAGS"]:
292 # file to be included in CSRC variable
294 # file to be included in PCSRC variable
296 # files to be included in CPPASRC variable
298 # files to be included in CXXSRC variable
300 # files to be included in ASRC variable
302 # constants to be included at the beginning of the makefile
304 for module, information in modules.items():
305 module_files = set([])
306 dependency_files = set([])
309 hwdir = os.path.basename(project_info.info("PROJECT_PATH")) + "/hw"
310 if information["enabled"]:
311 if "constants" in information:
312 constants.update(information["constants"])
313 cfiles, sfiles = findModuleFiles(module, project_info)
314 module_files |= set(cfiles)
315 asm_files |= set(sfiles)
# hw .c files live in the per-project hw/ directory.
316 for file in information["hw"]:
317 if file.endswith(".c"):
318 module_files |= set([hwdir + "/" + os.path.basename(file)])
319 for file_dependency in information["depends"] + tuple(files.keys()):
320 dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info)
321 dependency_files |= set(dependencyCFiles)
322 asm_files |= set(dependencySFiles)
# Harvard handling: modules flagged "harvard" go to PCSRC unless "both".
323 for file in module_files:
324 if not harvard or information.get("harvard", "both") == "both":
326 if harvard and "harvard" in information:
328 for file in dependency_files:
330 for file in project_info.info("CPU_INFOS")["C_SRC"]:
332 for file in project_info.info("CPU_INFOS")["PC_SRC"]:
334 for file in asm_files:
336 for file in project_info.info("CPU_INFOS")["CPPA_SRC"]:
338 for file in project_info.info("CPU_INFOS")["CXX_SRC"]:
340 for file in project_info.info("CPU_INFOS")["ASRC"]:
# Join each list into a backslash-continued makefile variable body.
343 csrc = " \\\n\t".join(csrc) + " \\"
345 pcsrc = " \\\n\t".join(pcsrc) + " \\"
346 cppasrc = set(cppasrc)
347 cppasrc = " \\\n\t".join(cppasrc) + " \\"
349 cxxsrc = " \\\n\t".join(cxxsrc) + " \\"
351 asrc = " \\\n\t".join(asrc) + " \\"
# Constants are emitted as '<project>_<KEY> = <value>' lines.
352 constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + unicode(value) for key, value in constants.items()])
353 return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants
355 def findModuleFiles(module, project_info):
356 # Find the files related to the selected module
# Return (cfiles, sfiles): source paths (relative to SOURCES_PATH, with
# unix separators) for the module itself, its toolchain-specific
# variants and its CPU-tag variants.
# NOTE(review): the 'cfiles = []' / 'sfiles = []' initializers are not
# visible in this extract.
359 # .c files related to the module and the cpu architecture
360 for filename, path in findDefinitions(module + ".c", project_info) + \
361 findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".c", project_info):
362 path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
363 path = replaceSeparators(path)
364 cfiles.append(path + "/" + filename)
365 # .s files related to the module and the cpu architecture
366 for filename, path in findDefinitions(module + ".s", project_info) + \
367 findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s", project_info) + \
368 findDefinitions(module + ".S", project_info) + \
369 findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S", project_info):
370 path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
371 path = replaceSeparators(path)
372 sfiles.append(path + "/" + filename)
373 # .c and .s files related to the module and the cpu tags
374 for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
375 for filename, path in findDefinitions(module + "_" + tag + ".c", project_info):
376 path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
378 path = replaceSeparators(path)
379 cfiles.append(path + "/" + filename)
380 for filename, path in findDefinitions(module + "_" + tag + ".s", project_info) + \
381 findDefinitions(module + "_" + tag + ".S", project_info):
382 path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
383 path = replaceSeparators(path)
384 sfiles.append(path + "/" + filename)
385 return cfiles, sfiles
387 def replaceSeparators(path):
389 Replace the separators in the given path with unix standard separator.
# NOTE(review): the docstring delimiters and the final 'return path' are
# not visible in this extract. The loop is a no-op on platforms where
# os.sep is already "/".
392 while path.find(os.sep) != -1:
393 path = path.replace(os.sep, "/")
# NOTE(review): fragment of a PATH-lookup helper whose 'def' line is not
# visible in this extract. It reads the PATH environment variable and
# splits it on the platform separator -- presumably ';' on Windows and
# ':' elsewhere; the surrounding os.name conditionals are also missing
# here. TODO confirm against the full source.
397 path = os.environ["PATH"]
399 path = path.split(";")
401 path = path.split(":")
404 def findToolchains(path_list):
# Search each directory in path_list for gcc executables matching the
# const.GCC_NAME glob; the set() round-trip removes duplicates.
# NOTE(review): the 'toolchains = []' initializer is not visible in this
# extract.
406 for element in path_list:
407 for toolchain in glob.glob(element+ "/" + const.GCC_NAME):
408 toolchains.append(toolchain)
409 return list(set(toolchains))
411 def getToolchainInfo(output):
# Parse the output of 'gcc -v' into a dict with target, version, build,
# configured and thread fields; each is set only when its regex matches
# exactly once.
# NOTE(review): the 'info = {}' initializer, some 'if len(...) == 1:'
# guards and the final 'return info' are not visible in this extract.
413 expr = re.compile("Target: .*")
414 target = expr.findall(output)
416 info["target"] = target[0].split("Target: ")[1]
417 expr = re.compile("gcc version [0-9,.]*")
418 version = expr.findall(output)
419 if len(version) == 1:
420 info["version"] = version[0].split("gcc version ")[1]
# The build description is the parenthesized text after the version.
421 expr = re.compile("gcc version [0-9,.]* \(.*\)")
422 build = expr.findall(output)
424 build = build[0].split("gcc version ")[1]
425 build = build[build.find("(") + 1 : build.find(")")]
426 info["build"] = build
427 expr = re.compile("Configured with: .*")
428 configured = expr.findall(output)
429 if len(configured) == 1:
430 info["configured"] = configured[0].split("Configured with: ")[1]
431 expr = re.compile("Thread model: .*")
432 thread = expr.findall(output)
434 info["thread"] = thread[0].split("Thread model: ")[1]
437 def getToolchainName(toolchain_info):
# Build a display name like "GCC 4.3.2 - arm-none-eabi".
# NOTE(review): the final 'return name' is not visible in this extract.
438 name = "GCC " + toolchain_info["version"] + " - " + toolchain_info["target"].strip()
def loadSourceTree(project):
    """Walk the whole BeRTOS source tree and cache it in the project.

    Stores the list of os.walk() tuples (dirpath, dirnames, filenames)
    under the FILE_LIST key, so later lookups (findDefinitions) do not
    re-scan the filesystem.
    """
    # Idiom fix: list(os.walk(...)) replaces the redundant
    # [f for f in os.walk(...)] identity comprehension.
    file_list = list(os.walk(project.info("SOURCES_PATH")))
    project.setInfo("FILE_LIST", file_list)
445 def findDefinitions(ftype, project):
# Return (filename, directory) pairs for every file in the cached
# FILE_LIST whose name matches the glob pattern 'ftype'.
# NOTE(review): the 'definitions = []' initializer, the 'for element in
# L:' loop header and the final return are not visible in this extract.
446 L = project.info("FILE_LIST")
449 for filename in element[2]:
450 if fnmatch.fnmatch(filename, ftype):
451 definitions.append((filename, element[0]))
454 def loadCpuInfos(project):
# Collect the info dict of every CPU definition file found in the
# source tree.
# NOTE(review): the 'cpuInfos = []' initializer and the final return are
# not visible in this extract.
456 for definition in findDefinitions(const.CPU_DEFINITION, project):
457 cpuInfos.append(getInfos(definition))
460 def getTagSet(cpu_info):
# Union of CPU names, CPU tags and toolchain names over all CPU infos.
# NOTE(review): the 'tag_set = set([])' initializer, the loop over
# cpu_info and the final return are not visible in this extract.
463 tag_set |= set([cpu["CPU_NAME"]])
464 tag_set |= set(cpu["CPU_TAGS"])
465 tag_set |= set([cpu["TOOLCHAIN"]])
469 def getInfos(definition):
# Execute a CPU definition file (definition is a (name, directory)
# tuple) and return the resulting dict, seeded with const.CPU_DEF.
# NOTE(review): the 'D = {}' initializer and the final 'return D' are
# not visible in this extract. include() writes into D directly (its
# 'dict' parameter is unused), and execfile runs the definition file's
# code -- trusted input only.
471 D.update(const.CPU_DEF)
472 def include(filename, dict = D, directory=definition[1]):
473 execfile(directory + "/" + filename, {}, D)
# Expose include() to the definition files so they can chain-include.
474 D["include"] = include
475 include(definition[0], D)
476 D["CPU_NAME"] = definition[0].split(".")[0]
477 D["DEFINITION_PATH"] = definition[1] + "/" + definition[0]
481 def getCommentList(string):
# Extract doxygen /** ... */ blocks from the text and split each block
# into a list of its lines with the leading ' * ' decoration stripped.
# NOTE(review): the final 'return comment_list' is not visible here.
482 comment_list = re.findall(r"/\*{2}\s*([^*]*\*(?:[^/*][^*]*\*+)*)/", string)
483 comment_list = [re.findall(r"^\s*\* *(.*?)$", comment, re.MULTILINE) for comment in comment_list]
486 def loadModuleDefinition(first_comment):
# Parse the first doxygen comment of a file: $WIZ$ directives are
# exec'd into module_definition, the \brief line becomes the module
# description, and the recognized module_* keys are moved into
# module_dict[module_name]; leftovers become the module constants.
# Returns (to_be_parsed, module_dict).
# NOTE(review): several lines (the 'module_dict = {}' and 'to_be_parsed'
# initializers, if/try/except/else headers) are not visible in this
# extract.
488 module_definition = {}
489 for num, line in enumerate(first_comment):
490 index = line.find("$WIZ$")
# Directives after the $WIZ$ marker are python assignments; a failure
# is reported with the line number and offending text.
494 exec line[index + len("$WIZ$ "):] in {}, module_definition
496 raise ParseError(num, line[index:])
497 elif line.find("\\brief") != -1:
498 module_definition["module_description"] = line[line.find("\\brief") + len("\\brief "):]
500 if "module_name" in module_definition:
501 module_name = module_definition[const.MODULE_DEFINITION["module_name"]]
502 del module_definition[const.MODULE_DEFINITION["module_name"]]
503 module_dict[module_name] = {}
504 if const.MODULE_DEFINITION["module_depends"] in module_definition:
505 depends = module_definition[const.MODULE_DEFINITION["module_depends"]]
506 del module_definition[const.MODULE_DEFINITION["module_depends"]]
# A single string dependency is normalized (presumably into a 1-tuple;
# the branch body is not visible here).
507 if type(depends) == str:
509 module_dict[module_name]["depends"] = depends
511 module_dict[module_name]["depends"] = ()
512 if const.MODULE_DEFINITION["module_configuration"] in module_definition:
513 module_dict[module_name]["configuration"] = module_definition[const.MODULE_DEFINITION["module_configuration"]]
514 del module_definition[const.MODULE_DEFINITION["module_configuration"]]
516 module_dict[module_name]["configuration"] = ""
517 if "module_description" in module_definition:
518 module_dict[module_name]["description"] = module_definition["module_description"]
519 del module_definition["module_description"]
520 if const.MODULE_DEFINITION["module_harvard"] in module_definition:
521 harvard = module_definition[const.MODULE_DEFINITION["module_harvard"]]
522 module_dict[module_name]["harvard"] = harvard
523 del module_definition[const.MODULE_DEFINITION["module_harvard"]]
524 if const.MODULE_DEFINITION["module_hw"] in module_definition:
525 hw = module_definition[const.MODULE_DEFINITION["module_hw"]]
526 del module_definition[const.MODULE_DEFINITION["module_hw"]]
529 module_dict[module_name]["hw"] = hw
531 module_dict[module_name]["hw"] = ()
532 if const.MODULE_DEFINITION["module_supports"] in module_definition:
533 supports = module_definition[const.MODULE_DEFINITION["module_supports"]]
534 del module_definition[const.MODULE_DEFINITION["module_supports"]]
535 module_dict[module_name]["supports"] = supports
# Whatever keys remain are treated as module constants.
536 module_dict[module_name]["constants"] = module_definition
537 module_dict[module_name]["enabled"] = False
538 return to_be_parsed, module_dict
540 def isSupported(project, module=None, property_id=None):
# Evaluate the 'supports' boolean expression of a module (or of a
# single configuration property) against the ALL_CPU_TAGS dict.
# NOTE(review): the 'else' header before line 544, the 'supported = {}'
# seed, the try/except wrapper and the no-'supports' fallback are not
# visible in this extract.
541 if not module and property_id:
542 item = project.info("CONFIGURATIONS")[property_id[0]][property_id[1]]["informations"]
544 item = project.info("MODULES")[module]
545 tag_dict = project.info("ALL_CPU_TAGS")
546 if "supports" in item:
547 support_string = item["supports"]
# The expression runs with the tag dict as globals, so tag names
# evaluate to their True/False state; failures raise SupportedException.
550 exec "supported = " + support_string in tag_dict, supported
552 raise SupportedException(support_string)
553 return supported["supported"]
557 def loadDefineLists(comment_list):
# Build the enum-lists dict from $WIZ$ directives found in the given
# comments; single-string values are normalized into 1-tuples.
# NOTE(review): the 'define_list = {}' initializer, the try/except
# around the exec and the final return are not visible in this extract.
559 for comment in comment_list:
560 for num, line in enumerate(comment):
561 index = line.find("$WIZ$")
564 exec line[index + len("$WIZ$ "):] in {}, define_list
566 raise ParseError(num, line[index:])
567 for key, value in define_list.items():
568 if type(value) == str:
569 define_list[key] = (value,)
572 def getDescriptionInformations(comment):
574 Take the doxygen comment and strip the wizard informations, returning the tuple
575 (comment, wizard_information)
# NOTE(review): the docstring delimiters, the brief/description/
# information initializers and several branch headers (first-line
# handling, try/except) are not visible in this extract. The first
# comment line feeds 'brief', later lines feed 'description'; $WIZ$
# directives are exec'd into 'information'. Returns
# (brief, description, information).
580 for num, line in enumerate(comment):
581 index = line.find("$WIZ$")
584 brief += line[:index].strip()
586 description += " " + line[:index]
588 exec line[index + len("$WIZ$ "):] in {}, information
590 raise ParseError(num, line[index:])
593 brief += line.strip()
595 description += " " + line
596 description = description.strip()
597 return brief.strip(), description.strip(), information
599 def getDefinitionBlocks(text):
601 Take a text and return a list of tuple (description, name-value).
# NOTE(review): the docstring delimiters, the 'block = []' initializer
# and the final 'return block' are not visible in this extract. Three
# passes collect (comment-lines, define, start-offset) triples for:
# doxygen /** */ blocks before a #define, '///' comments before a
# #define, and trailing '///<' comments after a #define.
604 block_tmp = re.finditer(r"/\*{2}\s*([^*]*\*(?:[^/*][^*]*\*+)*)/\s*#define\s+((?:[^/]*?/?)+)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE)
605 for match in block_tmp:
606 # Only the first element is needed
607 comment = match.group(1)
608 define = match.group(2)
609 start = match.start()
610 block.append(([re.findall(r"^\s*\* *(.*?)$", line, re.MULTILINE)[0] for line in comment.splitlines()], define, start))
611 for match in re.finditer(r"/{3}\s*([^<].*?)\s*#define\s+((?:[^/]*?/?)+)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE):
612 comment = match.group(1)
613 define = match.group(2)
614 start = match.start()
615 block.append(([comment], define, start))
616 for match in re.finditer(r"#define\s*(.*?)\s*/{3}<\s*(.+?)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE):
617 comment = match.group(2)
618 define = match.group(1)
619 start = match.start()
620 block.append(([comment], define, start))
623 def loadModuleData(project, edit=False):
# Scan every source/header file of the tree for wizard metadata and
# store the results in the project: MODULES, LISTS, CONFIGURATIONS and
# FILES. When edit=True, user-edited configuration values from the
# project directory override the defaults.
# NOTE(review): several lines (the 'list_info_dict'/'file_dict'
# initializers, try/except/if headers around lines 634, 643-645 and
# 648-652) are not visible in this extract.
624 module_info_dict = {}
626 configuration_info_dict = {}
628 for filename, path in findDefinitions("*.h", project) + findDefinitions("*.c", project) + findDefinitions("*.s", project) + findDefinitions("*.S", project):
629 comment_list = getCommentList(open(path + "/" + filename, "r").read())
630 if len(comment_list) > 0:
632 configuration_info = {}
# The first comment block of a file may define a module.
634 to_be_parsed, module_dict = loadModuleDefinition(comment_list[0])
635 except ParseError, err:
636 raise DefineException.ModuleDefineException(path, err.line_number, err.line)
637 for module, information in module_dict.items():
# Every module implicitly depends on its own source file.
638 if "depends" not in information:
639 information["depends"] = ()
640 information["depends"] += (filename.split(".")[0],)
641 information["category"] = os.path.basename(path)
642 if "configuration" in information and len(information["configuration"]):
643 configuration = module_dict[module]["configuration"]
645 configuration_info[configuration] = loadConfigurationInfos(project.info("SOURCES_PATH") + "/" + configuration)
646 except ParseError, err:
647 raise DefineException.ConfigurationDefineException(project.info("SOURCES_PATH") + "/" + configuration, err.line_number, err.line)
# In editing mode, merge the user's saved configuration values
# (read from the project copy of the cfg file) over the defaults.
650 path = os.path.basename(project.info("PROJECT_PATH"))
651 directory = project.info("PROJECT_PATH")
652 user_configuration = loadConfigurationInfos(directory + "/" + configuration.replace("bertos", path))
653 configuration_info[configuration] = updateConfigurationValues(configuration_info[configuration], user_configuration)
654 except ParseError, err:
655 raise DefineException.ConfigurationDefineException(directory + "/" + configuration.replace("bertos", path))
656 module_info_dict.update(module_dict)
657 configuration_info_dict.update(configuration_info)
# The remaining comment blocks may define enum lists.
660 list_dict = loadDefineLists(comment_list[1:])
661 list_info_dict.update(list_dict)
662 except ParseError, err:
663 raise DefineException.EnumDefineException(path, err.line_number, err.line)
# Also scan toolchain-specific and tag-specific headers for lists.
664 for filename, path in findDefinitions("*_" + project.info("CPU_INFOS")["TOOLCHAIN"] + ".h", project):
665 comment_list = getCommentList(open(path + "/" + filename, "r").read())
666 list_info_dict.update(loadDefineLists(comment_list))
667 for tag in project.info("CPU_INFOS")["CPU_TAGS"]:
668 for filename, path in findDefinitions("*_" + tag + ".h", project):
669 comment_list = getCommentList(open(path + "/" + filename, "r").read())
670 list_info_dict.update(loadDefineLists(comment_list))
671 project.setInfo("MODULES", module_info_dict)
672 project.setInfo("LISTS", list_info_dict)
673 project.setInfo("CONFIGURATIONS", configuration_info_dict)
674 project.setInfo("FILES", file_dict)
676 def formatParamNameValue(text):
678 Take the given string and return a tuple with the name of the parameter in the first position
679 and the value in the second.
# NOTE(review): the docstring delimiters and the final return (of the
# first findall match) are not visible in this extract.
681 block = re.findall("\s*([^\s]+)\s*(.+?)\s*$", text, re.MULTILINE)
684 def loadConfigurationInfos(path):
686 Return the module configurations found in the given file as a dict with the
687 parameter name as key and a dict containig the fields above as value:
688 "value": the value of the parameter
689 "description": the description of the parameter
690 "informations": a dict containig optional informations:
691 "type": "int" | "boolean" | "enum"
692 "min": the minimum value for integer parameters
693 "max": the maximum value for integer parameters
694 "long": boolean indicating if the num is a long
695 "unsigned": boolean indicating if the num is an unsigned
696 "value_list": the name of the enum for enum parameters
697 "conditional_deps": the list of conditional dependencies for boolean parameters
# NOTE(review): the docstring delimiters and a few lines (e.g. the tuple
# normalization body at 725-726) are not visible in this extract.
699 configuration_infos = {}
# paramlist keeps (file-offset, name) pairs so parameters can later be
# processed in file order.
700 configuration_infos["paramlist"] = []
701 for comment, define, start in getDefinitionBlocks(open(path, "r").read()):
702 name, value = formatParamNameValue(define)
703 brief, description, informations = getDescriptionInformations(comment)
704 configuration_infos["paramlist"].append((start, name))
705 configuration_infos[name] = {}
706 configuration_infos[name]["value"] = value
707 configuration_infos[name]["informations"] = informations
708 if not "type" in configuration_infos[name]["informations"]:
709 configuration_infos[name]["informations"]["type"] = findParameterType(configuration_infos[name])
# 'L' and 'U' literal suffixes are stripped from int values and recorded
# as the long/unsigned flags instead.
710 if ("type" in configuration_infos[name]["informations"] and
711 configuration_infos[name]["informations"]["type"] == "int" and
712 configuration_infos[name]["value"].find("L") != -1):
713 configuration_infos[name]["informations"]["long"] = True
714 configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("L", "")
715 if ("type" in configuration_infos[name]["informations"] and
716 configuration_infos[name]["informations"]["type"] == "int" and
717 configuration_infos[name]["value"].find("U") != -1):
718 configuration_infos[name]["informations"]["unsigned"] = True
719 configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("U", "")
# conditional_deps is normalized to a tuple (string -> 1-tuple,
# missing -> empty tuple).
720 if "conditional_deps" in configuration_infos[name]["informations"]:
721 if (type(configuration_infos[name]["informations"]["conditional_deps"]) == str or
722 type(configuration_infos[name]["informations"]["conditional_deps"]) == unicode):
723 configuration_infos[name]["informations"]["conditional_deps"] = (configuration_infos[name]["informations"]["conditional_deps"], )
724 elif type(configuration_infos[name]["informations"]["conditional_deps"]) == tuple:
727 configuration_infos[name]["informations"]["conditional_deps"] = ()
728 configuration_infos[name]["description"] = description
729 configuration_infos[name]["brief"] = brief
730 return configuration_infos
732 def updateConfigurationValues(def_conf, user_conf):
# Overwrite the default configuration values with those the user saved.
# NOTE(review): the final 'return def_conf' is not visible here.
733 for param in def_conf["paramlist"]:
734 if param[1] in user_conf and "value" in user_conf[param[1]]:
735 def_conf[param[1]]["value"] = user_conf[param[1]]["value"]
738 def findParameterType(parameter):
# Infer a parameter's wizard type: enum when it has a value_list, int
# when it has min/max bounds or a numeric literal value (with optional
# U/L suffixes).
# NOTE(review): the 'return' statements of both branches are not visible
# in this extract.
739 if "value_list" in parameter["informations"]:
741 if "min" in parameter["informations"] or "max" in parameter["informations"] or re.match(r"^\d+U?L?$", parameter["value"]) != None:
def sub(string, parameter, value):
    """Substitute the value of a '#define <parameter>' in *string*.

    Only the whitespace-delimited token right after the define name is
    replaced by *value*; anything after it (e.g. a trailing comment) is
    preserved.
    """
    # Robustness fix: escape the parameter name so regex metacharacters
    # in it cannot corrupt the pattern; for ordinary C identifiers the
    # behavior is unchanged.
    pattern = r"(?P<define>#define\s+" + re.escape(parameter) + r"\s+)([^\s]+)"
    return re.sub(pattern, r"\g<define>" + value, string)
def isInt(informations):
    """Return True if the value is a simple int.

    A parameter is a plain int when it is flagged neither "long" nor
    "unsigned" in its informations dict.
    """
    # Fixes two defects in the original condition: the misspelled name
    # 'informatios' (a NameError at runtime) and the unsigned test,
    # which checked informations["unsigned"] for truth instead of
    # negating it -- misclassifying unsigned ints as plain ints.
    is_long = "long" in informations and informations["long"]
    is_unsigned = "unsigned" in informations and informations["unsigned"]
    if not is_long and not is_unsigned:
        return True
    return False
def isLong(informations):
    """Return True if the value is a (signed) long.

    True when the informations dict flags "long" as true and does not
    flag "unsigned" as true (that combination is isUnsignedLong).
    """
    # Consistency fix: the original tested '"unsigned" not in
    # informations', so an explicit {"unsigned": False} wrongly made a
    # long parameter not-long; test the flag's value instead.
    is_long = "long" in informations and informations["long"]
    is_unsigned = "unsigned" in informations and informations["unsigned"]
    if is_long and not is_unsigned:
        return True
    return False
def isUnsigned(informations):
    """Return True if the value is an unsigned (non-long) int.

    True when the informations dict flags "unsigned" as true and does
    not flag "long" as true (that combination is isUnsignedLong).
    """
    # Consistency fix: mirror of isLong -- test the value of the "long"
    # flag rather than mere key presence, so {"long": False} does not
    # disqualify an unsigned parameter.
    is_long = "long" in informations and informations["long"]
    is_unsigned = "unsigned" in informations and informations["unsigned"]
    if is_unsigned and not is_long:
        return True
    return False
def isUnsignedLong(informations):
    """Return True if the value is an unsigned long.

    True only when both the "unsigned" and "long" flags are present and
    true in the informations dict.
    """
    # The original's condition is preserved; the trivial True/False
    # returns (missing from this extract) are restored explicitly.
    if "unsigned" in informations and "long" in informations and informations["unsigned"] and informations["long"]:
        return True
    return False
class ParseError(Exception):
    """Raised when a $WIZ$ wizard directive cannot be parsed.

    Attributes:
        line_number: index of the offending line within the comment block.
        line: the offending line content (from the $WIZ$ marker on).
    """
    def __init__(self, line_number, line):
        Exception.__init__(self)
        self.line_number = line_number
        # Fix: store the 'line' argument too -- callers read err.line
        # when building DefineException messages, so dropping it would
        # raise AttributeError at the first parse failure.
        self.line = line
class SupportedException(Exception):
    """Raised when a module's 'supports' expression fails to evaluate.

    Attributes:
        support_string: the boolean expression that could not be run.
    """
    def __init__(self, support_string):
        Exception.__init__(self)
        # Keep the offending expression so the caller can report it.
        self.support_string = support_string