4 # This file is part of BeRTOS.
6 # Bertos is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
20 # As a special exception, you may use this file as part of a free software
21 # library without restriction. Specifically, if other files instantiate
22 # templates or use macros or inline functions from this file, or you compile
23 # this file and link it with other files to produce an executable, this
24 # file does not by itself cause the resulting executable to be covered by
25 # the GNU General Public License. This exception does not however
26 # invalidate any other reasons why the executable file might be covered by
27 # the GNU General Public License.
29 # Copyright 2008 Develer S.r.l. (http://www.develer.com/)
33 # Author: Lorenzo Berni <duplo@develer.com>
41 # Use custom copytree function
47 import DefineException
def isBertosDir(directory):
    """Return True if directory looks like a BeRTOS source tree.

    A BeRTOS tree is recognized by the presence of a VERSION file at its
    top level.
    """
    # os.path.join instead of string concatenation: handles trailing
    # separators and Windows paths correctly.
    return os.path.exists(os.path.join(directory, "VERSION"))
def bertosVersion(directory):
    """Return the BeRTOS version string stored in directory's VERSION file.

    Only the first line of the file is read; surrounding whitespace is
    stripped.
    """
    # The original leaked the file handle; close it explicitly
    # (try/finally keeps compatibility with old Python versions).
    version_file = open(os.path.join(directory, "VERSION"))
    try:
        return version_file.readline().strip()
    finally:
        version_file.close()
def loadBertosProject(project_file):
    # Load a saved wizard project (a pickled dict) and rebuild a fully
    # populated BProject: paths, toolchain, cpu infos, cpu tags, module
    # data and the enabled-module set.
    # NOTE(review): several lines are elided in this excerpt (the
    # "for cpu in cpu_info:" loop header, the tag_dict initializer and
    # the tag-marking loop body); gaps are flagged inline — confirm
    # against the full file.
    # WARNING: pickle.loads on an untrusted project file can execute
    # arbitrary code; only open trusted project files.
    project_data = pickle.loads(open(project_file, "r").read())
    project_info = BProject.BProject()
    # The project lives in the directory that contains its project file.
    project_info.setInfo("PROJECT_PATH", os.path.dirname(project_file))
    project_info.setInfo("SOURCES_PATH", project_data["SOURCES_PATH"])
    project_info.setInfo("TOOLCHAIN", project_data["TOOLCHAIN"])
    project_info.setInfo("SELECTED_FREQ", project_data["SELECTED_FREQ"])
    project_info.setInfo("OUTPUT", project_data["OUTPUT"])
    loadSourceTree(project_info)
    cpu_name = project_data["CPU_NAME"]
    project_info.setInfo("CPU_NAME", cpu_name)
    cpu_info = loadCpuInfos(project_info)
    # NOTE(review): loop header elided — presumably "for cpu in cpu_info:"
    if cpu["CPU_NAME"] == cpu_name:
        project_info.setInfo("CPU_INFOS", cpu)
    tag_list = getTagSet(cpu_info)
    # Create, fill and store the dict with the tags
    # NOTE(review): initializer elided — presumably "tag_dict = {}"
    for element in tag_list:
        tag_dict[element] = False
    infos = project_info.info("CPU_INFOS")
    # NOTE(review): loop header elided — presumably "for tag in tag_dict:";
    # the "tag_dict[tag] = True" body of this if is elided as well.
    if tag in infos["CPU_TAGS"] + [infos["CPU_NAME"], infos["TOOLCHAIN"]]:
    project_info.setInfo("ALL_CPU_TAGS", tag_dict)
    loadModuleData(project_info, True)
    setEnabledModules(project_info, project_data["ENABLED_MODULES"])
    # NOTE(review): the final "return project_info" appears to be elided.
def setEnabledModules(project_info, enabled_modules):
    """Mark each module enabled/disabled and rebuild the FILES map.

    Every module whose name appears in enabled_modules gets
    information["enabled"] = True. FILES maps each dependency that is
    not itself a module (i.e. a plain file dependency) to the number of
    modules depending on it.
    """
    modules = project_info.info("MODULES")
    # NOTE(review): the "files = {}" initializer and the else branch of
    # the counter were elided from this excerpt; they are restored here
    # (forced by the visible "files[dependency] += 1" / setInfo calls).
    files = {}
    for module, information in modules.items():
        information["enabled"] = module in enabled_modules
        for dependency in information["depends"]:
            # Count only dependencies that are not modules themselves.
            if not dependency in modules:
                if dependency in files:
                    files[dependency] += 1
                else:
                    files[dependency] = 1
    project_info.setInfo("MODULES", modules)
    project_info.setInfo("FILES", files)
def enabledModules(project_info):
    """Return the list of names of the currently enabled modules."""
    # The excerpt was missing the accumulator initializer (NameError);
    # a comprehension makes the intent explicit.
    return [name
            for name, module in project_info.info("MODULES").items()
            if module["enabled"]]
def mergeSources(srcdir, new_sources, old_sources):
    """Replace the project's BeRTOS sources with the newly selected ones.

    Currently a raw copy: the destination is wiped and the "bertos"
    subtree of new_sources is copied over it; old_sources is unused.
    """
    # TODO: implement the three way merge algorithm
    bertos_sources = os.path.join(new_sources, "bertos")
    # ignore_errors=True: a missing destination is not an error.
    shutil.rmtree(srcdir, True)
    copytree.copytree(bertos_sources, srcdir, ignore_list=const.IGNORE_LIST)
def projectFileGenerator(project_info):
    """Serialize the savable project state and return it as a pickle blob.

    The saved dict carries the enabled-module list plus the scalar
    settings needed by loadBertosProject to rebuild the project.
    """
    # NOTE(review): the project_data/enabled_modules initializers were
    # elided from this excerpt and are restored here; the unused
    # "directory" local was dropped.
    project_data = {}
    enabled_modules = []
    for module, information in project_info.info("MODULES").items():
        if information["enabled"]:
            enabled_modules.append(module)
    project_data["ENABLED_MODULES"] = enabled_modules
    project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
    project_data["TOOLCHAIN"] = project_info.info("TOOLCHAIN")
    project_data["CPU_NAME"] = project_info.info("CPU_NAME")
    project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
    project_data["OUTPUT"] = project_info.info("OUTPUT")
    return pickle.dumps(project_data)
def createBertosProject(project_info, edit=False):
    # Create (or, with edit=True, update) a BeRTOS project tree on disk:
    # project file, copied sources, Makefile, hw files, configuration
    # files, the template mk/main.c files and the selected plugin outputs.
    # NOTE(review): this excerpt elides lines throughout — most likely
    # "if not edit:" guards, os.mkdir calls and f.write/f.close pairs;
    # the nesting shown below is therefore incomplete. Gaps are flagged
    # inline; confirm against the full file.
    directory = project_info.info("PROJECT_PATH")
    sources_dir = project_info.info("SOURCES_PATH")
    old_sources_dir = project_info.info("OLD_SOURCES_PATH")
    # NOTE(review): guard elided here — presumably "if not edit:"
    if os.path.isdir(directory):
        # Best-effort wipe of a pre-existing project (ignore_errors=True).
        shutil.rmtree(directory, True)
    os.makedirs(directory)
    # Write the project file
    f = open(directory + "/project.bertos", "w")
    f.write(projectFileGenerator(project_info))
    # NOTE(review): "f.close()" elided here
    # Destination source dir
    srcdir = directory + "/bertos"
    # NOTE(review): "if not edit:" elided here (the elif below needs it)
    # If not in editing mode it copies all the bertos sources in the /bertos subdirectory of the project
    shutil.rmtree(srcdir, True)
    copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
    elif old_sources_dir:
        # If in editing mode it merges the current bertos sources with the selected ones
        # TODO: implement the three way merge algotihm
        mergeSources(srcdir, sources_dir, old_sources_dir)
    # Destination makefile
    # NOTE(review): dead store — overwritten by the template read below.
    makefile = directory + "/Makefile"
    makefile = open("mktemplates/Makefile").read()
    makefile = makefileGenerator(project_info, makefile)
    open(directory + "/Makefile", "w").write(makefile)
    # Destination project dir
    prjdir = directory + "/" + os.path.basename(directory)
    # NOTE(review): guard elided; "os.mkdir(prjdir)" also elided below
    shutil.rmtree(prjdir, True)
    # Destination hw files
    hwdir = prjdir + "/hw"
    # NOTE(review): guard elided; "os.mkdir(hwdir)" also elided below
    shutil.rmtree(hwdir, True)
    # Copy all the hw files
    for module, information in project_info.info("MODULES").items():
        for hwfile in information["hw"]:
            string = open(sources_dir + "/" + hwfile, "r").read()
            hwfile_path = hwdir + "/" + os.path.basename(hwfile)
            if not edit or not os.path.exists(hwfile_path):
                # If not in editing mode it copies all the hw files. If in
                # editing mode it copies only the files that don't exist yet
                open(hwdir + "/" + os.path.basename(hwfile), "w").write(string)
    # Destination configurations files
    cfgdir = prjdir + "/cfg"
    # NOTE(review): guard elided; "os.mkdir(cfgdir)" also elided below
    shutil.rmtree(cfgdir, True)
    # Set properly the autoenabled parameters
    for module, information in project_info.info("MODULES").items():
        if "configuration" in information and information["configuration"] != "":
            configurations = project_info.info("CONFIGURATIONS")
            configuration = configurations[information["configuration"]]
            for start, parameter in configuration["paramlist"]:
                if "type" in configuration[parameter]["informations"] and configuration[parameter]["informations"]["type"] == "autoenabled":
                    # autoenabled parameters mirror the module's enabled state.
                    configuration[parameter]["value"] = "1" if information["enabled"] else "0"
            project_info.setInfo("CONFIGURATIONS", configurations)
    # Copy all the configuration files
    for configuration, information in project_info.info("CONFIGURATIONS").items():
        string = open(sources_dir + "/" + configuration, "r").read()
        for start, parameter in information["paramlist"]:
            infos = information[parameter]
            value = infos["value"]
            if "unsigned" in infos["informations"] and infos["informations"]["unsigned"]:
                # NOTE(review): body elided — presumably value += "U"
            if "long" in infos["informations"] and infos["informations"]["long"]:
                # NOTE(review): body elided — presumably value += "L"
            string = sub(string, parameter, value)
        f = open(cfgdir + "/" + os.path.basename(configuration), "w")
        # NOTE(review): lines elided — presumably f.write(string); f.close()
    # Destination user mk file (only on project creation)
    makefile = open("mktemplates/template.mk", "r").read()
    makefile = mkGenerator(project_info, makefile)
    open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
    # Destination wizard mk file
    makefile = open("mktemplates/template_wiz.mk", "r").read()
    makefile = mkGenerator(project_info, makefile)
    open(prjdir + "/" + os.path.basename(prjdir) + "_wiz.mk", "w").write(makefile)
    # Destination main.c file
    # NOTE(review): guard elided here — presumably "if not edit:"
    main = open("srctemplates/main.c", "r").read()
    open(prjdir + "/main.c", "w").write(main)
    # Files for selected plugins
    # NOTE(review): "relevants_files = {}" initializer elided here
    for plugin in project_info.info("OUTPUT"):
        module = loadPlugin(plugin)
        relevants_files[plugin] = module.createProject(project_info)
    project_info.setInfo("RELEVANT_FILES", relevants_files)
def loadPlugin(plugin):
    """
    Returns the given plugin module.
    """
    # __import__ with a fromlist returns the "plugins" package with the
    # submodule loaded; getattr then extracts the submodule itself.
    return getattr(__import__("plugins", {}, {}, [plugin]), plugin)
def mkGenerator(project_info, makefile):
    """
    Generates the mk file for the current project.

    Fills a dict of $-placeholders (project name, clock frequency, cpu
    mk parameters, source lists, toolchain prefix/suffix, main file) and
    substitutes each of them into the given template text.
    """
    # NOTE(review): the mk_data initializer, the substitution loop header
    # and the return were elided from this excerpt and are restored here.
    mk_data = {}
    mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
    mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ")
    cpu_mk_parameters = []
    for key, value in project_info.info("CPU_INFOS").items():
        if key.startswith(const.MK_PARAM_ID):
            # Rename the generic MK_* parameter after the project name.
            cpu_mk_parameters.append("%s = %s" %(key.replace("MK", mk_data["$pname"]), value))
    mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters)
    mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info)
    # The toolchain path is split around "gcc" into prefix/suffix parts.
    mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[0])
    mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[1])
    mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
    for key in mk_data:
        while makefile.find(key) != -1:
            makefile = makefile.replace(key, mk_data[key])
    return makefile
def makefileGenerator(project_info, makefile):
    """
    Generate the Makefile for the current project.

    Replaces every "$pname" placeholder in the template text with the
    project name (the basename of the project path) and returns the
    resulting text.
    """
    # TODO write a general function that works for both the mk file and the Makefile
    # NOTE(review): the return was elided from this excerpt; restored here.
    while makefile.find("$pname") != -1:
        makefile = makefile.replace("$pname", os.path.basename(project_info.info("PROJECT_PATH")))
    return makefile
def csrcGenerator(project_info):
    # Build the source-file lists ($csrc, $pcsrc, $cppasrc, $cxxsrc,
    # $asrc) and the constants block substituted into the project mk file.
    # NOTE(review): this excerpt elides the harvard flag assignment, the
    # csrc/pcsrc/cppasrc/cxxsrc/asrc/constants/asm_files initializers and
    # every per-loop append body; gaps are flagged inline — confirm
    # against the full file.
    modules = project_info.info("MODULES")
    files = project_info.info("FILES")
    if "harvard" in project_info.info("CPU_INFOS")["CPU_TAGS"]:
        # NOTE(review): body elided — presumably harvard = True / else: harvard = False
    # file to be included in CSRC variable
    # file to be included in PCSRC variable
    # files to be included in CPPASRC variable
    # files to be included in CXXSRC variable
    # files to be included in ASRC variable
    # constants to be included at the beginning of the makefile
    # NOTE(review): the corresponding empty-container initializers are elided here
    for module, information in modules.items():
        module_files = set([])
        dependency_files = set([])
        # NOTE(review): asm_files initializer elided here
        # hw files are referenced relative to the project's hw directory.
        hwdir = os.path.basename(project_info.info("PROJECT_PATH")) + "/hw"
        if information["enabled"]:
            if "constants" in information:
                constants.update(information["constants"])
            cfiles, sfiles = findModuleFiles(module, project_info)
            module_files |= set(cfiles)
            asm_files |= set(sfiles)
            for file in information["hw"]:
                if file.endswith(".c"):
                    module_files |= set([hwdir + "/" + os.path.basename(file)])
            for file_dependency in information["depends"]:
                # Only plain file dependencies (tracked in FILES) add sources.
                if file_dependency in files:
                    dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info)
                    dependency_files |= set(dependencyCFiles)
                    asm_files |= set(dependencySFiles)
            for file in module_files:
                if not harvard or information.get("harvard", "both") == "both":
                    # NOTE(review): append (presumably to csrc) elided
                if harvard and "harvard" in information:
                    # NOTE(review): append (presumably to pcsrc) elided
            for file in dependency_files:
                # NOTE(review): append elided
            for file in project_info.info("CPU_INFOS")["C_SRC"]:
                # NOTE(review): append elided
            for file in project_info.info("CPU_INFOS")["PC_SRC"]:
                # NOTE(review): append elided
            for file in asm_files:
                # NOTE(review): append elided
            for file in project_info.info("CPU_INFOS")["CPPA_SRC"]:
                # NOTE(review): append elided
            for file in project_info.info("CPU_INFOS")["CXX_SRC"]:
                # NOTE(review): append elided
            for file in project_info.info("CPU_INFOS")["ASRC"]:
                # NOTE(review): append elided
    # Render each list in makefile continuation style.
    # NOTE(review): "csrc = set(csrc)" de-duplication elided
    csrc = " \\\n\t".join(csrc) + " \\"
    # NOTE(review): "pcsrc = set(pcsrc)" elided
    pcsrc = " \\\n\t".join(pcsrc) + " \\"
    cppasrc = set(cppasrc)
    cppasrc = " \\\n\t".join(cppasrc) + " \\"
    # NOTE(review): "cxxsrc = set(cxxsrc)" elided
    cxxsrc = " \\\n\t".join(cxxsrc) + " \\"
    # NOTE(review): "asrc = set(asrc)" elided
    asrc = " \\\n\t".join(asrc) + " \\"
    # unicode() is Python 2 — constants values may be numbers or strings.
    constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + unicode(value) for key, value in constants.items()])
    return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants
def findModuleFiles(module, project_info):
    """Return (cfiles, sfiles) for the given module.

    cfiles / sfiles are the .c and .s/.S sources implementing the module
    for the current cpu, looked up by base name, by toolchain suffix and
    by each cpu tag suffix; paths are made relative to SOURCES_PATH and
    normalized to unix separators.
    """
    # NOTE(review): the two accumulator initializers were elided from
    # this excerpt and are restored here.
    cfiles = []
    sfiles = []
    # .c files related to the module and the cpu architecture
    for filename, path in findDefinitions(module + ".c", project_info) + \
            findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".c", project_info):
        path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
        path = replaceSeparators(path)
        cfiles.append(path + "/" + filename)
    # .s files related to the module and the cpu architecture
    for filename, path in findDefinitions(module + ".s", project_info) + \
            findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s", project_info) + \
            findDefinitions(module + ".S", project_info) + \
            findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S", project_info):
        path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
        path = replaceSeparators(path)
        sfiles.append(path + "/" + filename)
    # .c and .s files related to the module and the cpu tags
    for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
        for filename, path in findDefinitions(module + "_" + tag + ".c", project_info):
            path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
            path = replaceSeparators(path)
            cfiles.append(path + "/" + filename)
        for filename, path in findDefinitions(module + "_" + tag + ".s", project_info) + \
                findDefinitions(module + "_" + tag + ".S", project_info):
            path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
            path = replaceSeparators(path)
            sfiles.append(path + "/" + filename)
    return cfiles, sfiles
def replaceSeparators(path):
    """
    Replace the separators in the given path with unix standard separator.
    """
    # str.replace substitutes every occurrence in one pass. The excerpt's
    # while-loop form would never terminate on platforms where
    # os.sep == "/" (replace() leaves the string unchanged, so find()
    # keeps succeeding); a single replace is both safe and equivalent.
    return path.replace(os.sep, "/")
def getSystemPath():
    """Return the system PATH as a list of directories.

    The separator depends on the platform: ";" on Windows (os.name ==
    "nt"), ":" elsewhere.
    """
    # NOTE(review): the def line and the branch keywords were elided from
    # this excerpt; the structure is reconstructed from the two split()
    # lines, which only make sense as an nt/else pair.
    path = os.environ["PATH"]
    if os.name == "nt":
        path = path.split(";")
    else:
        path = path.split(":")
    return path
def findToolchains(path_list):
    """Search every directory in path_list for gcc-like executables.

    Matching is done with the const.GCC_NAME glob pattern; the result is
    de-duplicated (order not guaranteed).
    """
    # NOTE(review): the "toolchains = []" initializer was elided from
    # this excerpt and is restored here.
    toolchains = []
    for element in path_list:
        for toolchain in glob.glob(element + "/" + const.GCC_NAME):
            toolchains.append(toolchain)
    return list(set(toolchains))
def getToolchainInfo(output):
    """Parse `gcc -v`-style output into a dict.

    Keys (each present only when found exactly once in the output):
    "target", "version", "build", "configured", "thread".
    """
    # NOTE(review): the info initializer, three "if len(...) == 1:"
    # guards and the return were elided from this excerpt; they are
    # restored here, mirroring the guards still visible for version and
    # configured.
    info = {}
    expr = re.compile("Target: .*")
    target = expr.findall(output)
    if len(target) == 1:
        info["target"] = target[0].split("Target: ")[1]
    expr = re.compile("gcc version [0-9,.]*")
    version = expr.findall(output)
    if len(version) == 1:
        info["version"] = version[0].split("gcc version ")[1]
    # Build name is the parenthesized text after the version number.
    expr = re.compile(r"gcc version [0-9,.]* \(.*\)")
    build = expr.findall(output)
    if len(build) == 1:
        build = build[0].split("gcc version ")[1]
        build = build[build.find("(") + 1 : build.find(")")]
        info["build"] = build
    expr = re.compile("Configured with: .*")
    configured = expr.findall(output)
    if len(configured) == 1:
        info["configured"] = configured[0].split("Configured with: ")[1]
    expr = re.compile("Thread model: .*")
    thread = expr.findall(output)
    if len(thread) == 1:
        info["thread"] = thread[0].split("Thread model: ")[1]
    return info
def getToolchainName(toolchain_info):
    """Build a human-readable toolchain label, e.g. "GCC 4.2.1 - arm-elf".

    toolchain_info is a dict as returned by getToolchainInfo; it must
    contain "version" and "target".
    """
    # NOTE(review): the return was elided from this excerpt; restored here.
    name = "GCC " + toolchain_info["version"] + " - " + toolchain_info["target"].strip()
    return name
def loadSourceTree(project):
    """Walk the project's sources directory and cache the complete
    os.walk() listing under the FILE_LIST project info key."""
    sources_root = project.info("SOURCES_PATH")
    project.setInfo("FILE_LIST", list(os.walk(sources_root)))
def findDefinitions(ftype, project):
    """Return (filename, directory) pairs for every cached file matching
    the glob pattern ftype.

    The search runs over the FILE_LIST previously stored by
    loadSourceTree, so no filesystem access happens here.
    """
    # NOTE(review): the accumulator, the outer loop header and the return
    # were elided from this excerpt and are restored here.
    definitions = []
    for element in project.info("FILE_LIST"):
        # element is a (dirpath, dirnames, filenames) triple from os.walk().
        for filename in element[2]:
            if fnmatch.fnmatch(filename, ftype):
                definitions.append((filename, element[0]))
    return definitions
def loadCpuInfos(project):
    """Return the list of cpu info dicts parsed from every cpu definition
    file (const.CPU_DEFINITION pattern) found in the project's cached
    source tree."""
    # NOTE(review): the accumulator and the return were elided from this
    # excerpt and are restored here.
    cpuInfos = []
    for definition in findDefinitions(const.CPU_DEFINITION, project):
        cpuInfos.append(getInfos(definition))
    return cpuInfos
def getTagSet(cpu_info):
    """Collect the set of all tags declared by a list of cpu info dicts:
    every cpu name, every cpu tag and every toolchain name."""
    # NOTE(review): the set initializer and the loop header were elided
    # from this excerpt and are restored here, along with the return.
    tag_set = set([])
    for cpu in cpu_info:
        tag_set |= set([cpu["CPU_NAME"]])
        tag_set |= set(cpu["CPU_TAGS"])
        tag_set |= set([cpu["TOOLCHAIN"]])
    return tag_set
def getInfos(definition):
    """Evaluate a cpu definition file and return its info dict.

    definition is a (filename, directory) pair. The file is executed
    (Python 2 execfile) with D as its namespace, starting from the
    const.CPU_DEF defaults; definition files may pull in further files
    through the provided include() helper. CPU_NAME and DEFINITION_PATH
    are derived from the definition location.
    """
    # NOTE(review): the "D = {}" initializer and the "return D" were
    # elided from this excerpt and are restored here.
    D = {}
    D.update(const.CPU_DEF)
    def include(filename, dict = D, directory=definition[1]):
        # execfile is a Python 2 builtin, consistent with the rest of
        # this file.
        execfile(directory + "/" + filename, {}, D)
    D["include"] = include
    include(definition[0], D)
    D["CPU_NAME"] = definition[0].split(".")[0]
    D["DEFINITION_PATH"] = definition[1] + "/" + definition[0]
    return D
def getCommentList(string):
    """Extract all doxygen-style /** ... */ block comments from string.

    Each comment is returned as a list of its lines with the leading
    "* " decoration stripped.
    """
    # NOTE(review): the return was elided from this excerpt; restored here.
    comment_list = re.findall(r"/\*{2}\s*([^*]*\*(?:[^/*][^*]*\*+)*)/", string)
    comment_list = [re.findall(r"^\s*\* *(.*?)$", comment, re.MULTILINE) for comment in comment_list]
    return comment_list
def loadModuleDefinition(first_comment):
    # Parse the first doxygen comment of a source file looking for $WIZ$
    # module directives and build a module_dict entry for it; returns
    # (to_be_parsed, module_dict).
    # NOTE(review): this excerpt elides several lines (the to_be_parsed
    # and module_dict initializers, the "if index != -1:"/try guards
    # around the exec, the except line and a few else branches); gaps
    # are flagged inline. Uses the Python 2 exec statement.
    module_definition = {}
    for num, line in enumerate(first_comment):
        index = line.find("$WIZ$")
        # NOTE(review): "if index != -1:" guard and "try:" elided here
        exec line[index + len("$WIZ$ "):] in {}, module_definition
        # NOTE(review): "except:" elided — a bad directive raises ParseError
        raise ParseError(num, line[index:])
        elif line.find("\\brief") != -1:
            # The doxygen \brief text doubles as the module description.
            module_definition["module_description"] = line[line.find("\\brief") + len("\\brief "):]
    # NOTE(review): "module_dict = {}" initializer elided here
    if "module_name" in module_definition:
        module_name = module_definition[const.MODULE_DEFINITION["module_name"]]
        del module_definition[const.MODULE_DEFINITION["module_name"]]
        module_dict[module_name] = {}
        if const.MODULE_DEFINITION["module_depends"] in module_definition:
            depends = module_definition[const.MODULE_DEFINITION["module_depends"]]
            del module_definition[const.MODULE_DEFINITION["module_depends"]]
            if type(depends) == str:
                # NOTE(review): body elided — presumably depends = (depends,)
            module_dict[module_name]["depends"] = depends
        # NOTE(review): else branch header elided — no depends declared:
            module_dict[module_name]["depends"] = ()
        if const.MODULE_DEFINITION["module_configuration"] in module_definition:
            module_dict[module_name]["configuration"] = module_definition[const.MODULE_DEFINITION["module_configuration"]]
            del module_definition[const.MODULE_DEFINITION["module_configuration"]]
        # NOTE(review): else branch header elided — no configuration file:
            module_dict[module_name]["configuration"] = ""
        if "module_description" in module_definition:
            module_dict[module_name]["description"] = module_definition["module_description"]
            del module_definition["module_description"]
        if const.MODULE_DEFINITION["module_harvard"] in module_definition:
            harvard = module_definition[const.MODULE_DEFINITION["module_harvard"]]
            module_dict[module_name]["harvard"] = harvard
            del module_definition[const.MODULE_DEFINITION["module_harvard"]]
        if const.MODULE_DEFINITION["module_hw"] in module_definition:
            hw = module_definition[const.MODULE_DEFINITION["module_hw"]]
            del module_definition[const.MODULE_DEFINITION["module_hw"]]
            # NOTE(review): string-to-tuple normalization elided here
            module_dict[module_name]["hw"] = hw
        # NOTE(review): else branch header elided — no hw files:
            module_dict[module_name]["hw"] = ()
        if const.MODULE_DEFINITION["module_supports"] in module_definition:
            supports = module_definition[const.MODULE_DEFINITION["module_supports"]]
            del module_definition[const.MODULE_DEFINITION["module_supports"]]
            module_dict[module_name]["supports"] = supports
        # Whatever $WIZ$ keys remain are treated as module constants.
        module_dict[module_name]["constants"] = module_definition
        module_dict[module_name]["enabled"] = False
    return to_be_parsed, module_dict
def isSupported(project, module=None, property_id=None):
    # Evaluate a "supports" expression (a boolean expression over cpu/tag
    # names) for either a module or a single configuration property.
    # NOTE(review): this excerpt elides the "else:" before the MODULES
    # lookup, the supported-dict initializer and try/except around the
    # exec, and (presumably) a trailing "else: return True" fallback for
    # items without a supports clause. Uses the Python 2 exec statement.
    if not module and property_id:
        item = project.info("CONFIGURATIONS")[property_id[0]][property_id[1]]["informations"]
    item = project.info("MODULES")[module]
    tag_dict = project.info("ALL_CPU_TAGS")
    if "supports" in item:
        support_string = item["supports"]
        # The expression is evaluated with the cpu tags as globals and
        # the result bound into supported["supported"].
        exec "supported = " + support_string in tag_dict, supported
        # NOTE(review): "except:" elided — evaluation failures raise:
        raise SupportedException(support_string)
        return supported["supported"]
def loadDefineLists(comment_list):
    # Collect every $WIZ$ enum/list definition found in the given
    # comments into one dict; scalar string values are normalized to
    # 1-tuples.
    # NOTE(review): this excerpt elides the define_list initializer, the
    # "if index != -1:"/"try:" guards around the exec, the "except:"
    # line and the final "return define_list". Uses the Python 2 exec
    # statement.
    for comment in comment_list:
        for num, line in enumerate(comment):
            index = line.find("$WIZ$")
            exec line[index + len("$WIZ$ "):] in {}, define_list
            raise ParseError(num, line[index:])
    for key, value in define_list.items():
        if type(value) == str:
            # Normalize a bare string to a 1-tuple of choices.
            define_list[key] = (value,)
def getDescriptionInformations(comment):
    """
    Take the doxygen comment and strip the wizard informations, returning the tuple
    (comment, wizard_information)
    """
    # NOTE(review): this excerpt elides the brief/description/information
    # initializers, the "if index != -1:" branch structure, and the
    # try/except around the exec; gaps are flagged inline. The first
    # non-empty text goes to brief, the rest to description. Uses the
    # Python 2 exec statement.
    for num, line in enumerate(comment):
        index = line.find("$WIZ$")
        # NOTE(review): "if index != -1:" and "if len(brief) == 0:" elided
        brief += line[:index].strip()
        # NOTE(review): "else:" elided — later text accumulates in description
        description += " " + line[:index]
        # NOTE(review): "try:" elided
        exec line[index + len("$WIZ$ "):] in {}, information
        # NOTE(review): "except:" elided
        raise ParseError(num, line[index:])
        # NOTE(review): else branch (line without $WIZ$) elided:
        brief += line.strip()
        description += " " + line
    description = description.strip()
    return brief.strip(), description.strip(), information
def getDefinitionBlocks(text):
    """
    Take a text and return a list of tuples (description, name-value,
    start-offset), one per #define found.

    Three comment styles are recognized: a doxygen /** ... */ block
    before the define, a /// line before the define, and a ///< trailing
    comment after the define.
    """
    # NOTE(review): the "block = []" initializer and the return were
    # elided from this excerpt and are restored here.
    block = []
    block_tmp = re.finditer(r"/\*{2}\s*([^*]*\*(?:[^/*][^*]*\*+)*)/\s*#define\s+((?:[^/]*?/?)+)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE)
    for match in block_tmp:
        # Only the first element is needed
        comment = match.group(1)
        define = match.group(2)
        start = match.start()
        block.append(([re.findall(r"^\s*\* *(.*?)$", line, re.MULTILINE)[0] for line in comment.splitlines()], define, start))
    for match in re.finditer(r"/{3}\s*([^<].*?)\s*#define\s+((?:[^/]*?/?)+)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE):
        comment = match.group(1)
        define = match.group(2)
        start = match.start()
        block.append(([comment], define, start))
    for match in re.finditer(r"#define\s*(.*?)\s*/{3}<\s*(.+?)\s*?(?:/{2,3}[^<].*?)?$", text, re.MULTILINE):
        comment = match.group(2)
        define = match.group(1)
        start = match.start()
        block.append(([comment], define, start))
    return block
def loadModuleData(project, edit=False):
    # Scan every cached source file, harvest module definitions,
    # configuration files and $WIZ$ enum lists, and store the results on
    # the project (MODULES / LISTS / CONFIGURATIONS / FILES).
    # NOTE(review): this excerpt elides a number of lines (the
    # list_info_dict/file_dict initializers and several "try:" /
    # "if edit:" / "if len(comment_list) > 1:" guard lines); gaps are
    # flagged inline. Uses Python 2 "except E, err" syntax, consistent
    # with the rest of the file.
    module_info_dict = {}
    configuration_info_dict = {}
    for filename, path in findDefinitions("*.h", project) + findDefinitions("*.c", project) + findDefinitions("*.s", project) + findDefinitions("*.S", project):
        comment_list = getCommentList(open(path + "/" + filename, "r").read())
        if len(comment_list) > 0:
            # The first comment may hold the module definition.
            configuration_info = {}
            # NOTE(review): "try:" elided here
            to_be_parsed, module_dict = loadModuleDefinition(comment_list[0])
            except ParseError, err:
                raise DefineException.ModuleDefineException(path, err.line_number, err.line)
            for module, information in module_dict.items():
                if "depends" not in information:
                    information["depends"] = ()
                # A module always depends on its own source file.
                information["depends"] += (filename.split(".")[0],)
                information["category"] = os.path.basename(path)
                if "configuration" in information and len(information["configuration"]):
                    configuration = module_dict[module]["configuration"]
                    # NOTE(review): "try:" elided here
                    configuration_info[configuration] = loadConfigurationInfos(project.info("SOURCES_PATH") + "/" + configuration)
                    except ParseError, err:
                        raise DefineException.ConfigurationDefineException(project.info("SOURCES_PATH") + "/" + configuration, err.line_number, err.line)
                    # NOTE(review): "if edit:"/"try:" guards elided — the block
                    # below overlays the user's saved configuration values.
                    # NOTE(review): rebinding "path" here shadows the loop
                    # variable of the enclosing for — suspicious; confirm.
                    path = os.path.basename(project.info("PROJECT_PATH"))
                    directory = project.info("PROJECT_PATH")
                    user_configuration = loadConfigurationInfos(directory + "/" + configuration.replace("bertos", path))
                    configuration_info[configuration] = updateConfigurationValues(configuration_info[configuration], user_configuration)
                    except ParseError, err:
                        # NOTE(review): unlike the raise above, this call passes
                        # no line info — looks like a latent bug; confirm.
                        raise DefineException.ConfigurationDefineException(directory + "/" + configuration.replace("bertos", path))
            module_info_dict.update(module_dict)
            configuration_info_dict.update(configuration_info)
            # The remaining comments may hold $WIZ$ enum list definitions.
            # NOTE(review): guard and "try:" elided here
            list_dict = loadDefineLists(comment_list[1:])
            list_info_dict.update(list_dict)
            except ParseError, err:
                raise DefineException.EnumDefineException(path, err.line_number, err.line)
    # Toolchain- and tag-specific headers may contribute further lists.
    for filename, path in findDefinitions("*_" + project.info("CPU_INFOS")["TOOLCHAIN"] + ".h", project):
        comment_list = getCommentList(open(path + "/" + filename, "r").read())
        list_info_dict.update(loadDefineLists(comment_list))
    for tag in project.info("CPU_INFOS")["CPU_TAGS"]:
        for filename, path in findDefinitions("*_" + tag + ".h", project):
            comment_list = getCommentList(open(path + "/" + filename, "r").read())
            list_info_dict.update(loadDefineLists(comment_list))
    project.setInfo("MODULES", module_info_dict)
    project.setInfo("LISTS", list_info_dict)
    project.setInfo("CONFIGURATIONS", configuration_info_dict)
    project.setInfo("FILES", file_dict)
def formatParamNameValue(text):
    """
    Take the given string and return a tuple with the name of the parameter
    in the first position and the value in the second.
    """
    # NOTE(review): the "return block[0]" was elided from this excerpt
    # and is restored here; the regex is now a raw string.
    block = re.findall(r"\s*([^\s]+)\s*(.+?)\s*$", text, re.MULTILINE)
    return block[0]
def loadConfigurationInfos(path):
    """
    Return the module configurations found in the given file as a dict with the
    parameter name as key and a dict containing the fields below as value:
        "value": the value of the parameter
        "description": the description of the parameter
        "brief": the one-line summary of the parameter
        "informations": a dict containing optional informations:
            "type": "int" | "boolean" | "enum"
            "min": the minimum value for integer parameters
            "max": the maximum value for integer parameters
            "long": boolean indicating if the num is a long
            "unsigned": boolean indicating if the num is an unsigned
            "value_list": the name of the enum for enum parameters

    The special "paramlist" key holds (file-offset, name) pairs so the
    parameters can be processed in their original file order.
    """
    configuration_infos = {}
    configuration_infos["paramlist"] = []
    for comment, define, start in getDefinitionBlocks(open(path, "r").read()):
        name, value = formatParamNameValue(define)
        brief, description, informations = getDescriptionInformations(comment)
        configuration_infos["paramlist"].append((start, name))
        configuration_infos[name] = {}
        configuration_infos[name]["value"] = value
        configuration_infos[name]["informations"] = informations
        # Infer the type when the $WIZ$ directives did not declare one.
        if not "type" in configuration_infos[name]["informations"]:
            configuration_infos[name]["informations"]["type"] = findParameterType(configuration_infos[name])
        # An "L" suffix on an int value marks it long: record the flag
        # and strip the suffix from the stored value.
        if ("type" in configuration_infos[name]["informations"] and
                configuration_infos[name]["informations"]["type"] == "int" and
                configuration_infos[name]["value"].find("L") != -1):
            configuration_infos[name]["informations"]["long"] = True
            configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("L", "")
        # Same for the "U" (unsigned) suffix.
        if ("type" in configuration_infos[name]["informations"] and
                configuration_infos[name]["informations"]["type"] == "int" and
                configuration_infos[name]["value"].find("U") != -1):
            configuration_infos[name]["informations"]["unsigned"] = True
            configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("U", "")
        configuration_infos[name]["description"] = description
        configuration_infos[name]["brief"] = brief
    return configuration_infos
def updateConfigurationValues(def_conf, user_conf):
    """Overlay values from a user configuration onto the default one.

    For every parameter in def_conf's paramlist whose name also appears
    in user_conf with a "value", copy the user's value over the default.
    Returns the (mutated) def_conf.
    """
    # NOTE(review): the "return def_conf" was elided from this excerpt
    # and is restored here (callers assign the result).
    for param in def_conf["paramlist"]:
        if param[1] in user_conf and "value" in user_conf[param[1]]:
            def_conf[param[1]]["value"] = user_conf[param[1]]["value"]
    return def_conf
def findParameterType(parameter):
    """Infer a configuration parameter's type.

    "enum" when a value_list is declared, "int" when min/max bounds are
    given or the value looks like an integer literal (optionally with
    U/L suffixes), otherwise None.
    """
    # NOTE(review): the two return statements were elided from this
    # excerpt; they are reconstructed from how the caller
    # (loadConfigurationInfos) consumes the result.
    if "value_list" in parameter["informations"]:
        return "enum"
    if "min" in parameter["informations"] or "max" in parameter["informations"] or re.match(r"^\d+U?L?$", parameter["value"]) is not None:
        return "int"
    return None
def sub(string, parameter, value):
    """
    Substitute the given value at the given parameter define in the given string.
    """
    # Keep the "#define NAME " prefix (named group) and swap only the
    # old value token that follows it.
    pattern = r"(?P<define>#define\s+" + parameter + r"\s+)([^\s]+)"
    replacement = r"\g<define>" + value
    return re.sub(pattern, replacement, string)
def isInt(informations):
    """
    Return True if the value is a simple int (neither long nor unsigned).
    """
    # Fixes relative to the excerpt: the "informatios" typo (NameError at
    # runtime) and the missing "not" before informations["unsigned"] — a
    # plain int must NOT be unsigned, mirroring the check on "long" and
    # the companion isUnsigned predicate. The elided return statements
    # are restored as well.
    if ("long" not in informations or not informations["long"]) and ("unsigned" not in informations or not informations["unsigned"]):
        return True
    else:
        return False
def isLong(informations):
    """
    Return True if the value is a long (and no unsigned flag is present).
    """
    # NOTE(review): the return statements were elided from this excerpt
    # and are restored here; the visible condition (which tests for key
    # *presence* of "unsigned") is kept unchanged.
    if "long" in informations and informations["long"] and "unsigned" not in informations:
        return True
    return False
def isUnsigned(informations):
    """
    Return True if the value is an unsigned (and no long flag is present).
    """
    # NOTE(review): the return statements were elided from this excerpt
    # and are restored here; the visible condition (which tests for key
    # *presence* of "long") is kept unchanged.
    if "unsigned" in informations and informations["unsigned"] and "long" not in informations:
        return True
    return False
def isUnsignedLong(informations):
    """
    Return True if the value is an unsigned long (both flags set).
    """
    # NOTE(review): the return statements were elided from this excerpt
    # and are restored here.
    if "unsigned" in informations and "long" in informations and informations["unsigned"] and informations["long"]:
        return True
    return False
class ParseError(Exception):
    """Raised when a $WIZ$ wizard directive fails to parse.

    Carries the offending line number and line text so callers can build
    a precise DefineException message.
    """
    def __init__(self, line_number, line):
        Exception.__init__(self)
        self.line_number = line_number
        # Store the offending text too: loadModuleData reads err.line
        # when wrapping this in a DefineException.
        self.line = line
class SupportedException(Exception):
    """Raised when evaluating a module's "supports" expression fails.

    Carries the offending expression text (support_string) for error
    reporting.
    """
    def __init__(self, support_string):
        Exception.__init__(self)
        self.support_string = support_string