#!/usr/bin/env python
# encoding: utf-8
#
+# This file is part of BeRTOS.
+#
+# Bertos is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# As a special exception, you may use this file as part of a free software
+# library without restriction. Specifically, if other files instantiate
+# templates or use macros or inline functions from this file, or you compile
+# this file and link it with other files to produce an executable, this
+# file does not by itself cause the resulting executable to be covered by
+# the GNU General Public License. This exception does not however
+# invalidate any other reasons why the executable file might be covered by
+# the GNU General Public License.
+#
# Copyright 2008 Develer S.r.l. (http://www.develer.com/)
-# All rights reserved.
#
-# $Id:$
#
# Author: Lorenzo Berni <duplo@develer.com>
#
import glob
import re
import shutil
+# Use custom copytree function
+import copytree
+import relpath
+import pickle
import const
-import codelite_project
+import plugins
import DefineException
+from _wizard_version import WIZARD_VERSION
+
+from LoadException import VersionException, ToolchainException
+
+def _cmp(x, y):
+ result = cmp(x["info"].get('ord', 0), y["info"].get('ord', 0))
+ if result == 0:
+ result = cmp(
+ x["info"].get("name", x["info"]["filename"]).lower(),
+ y["info"].get("name", y["info"]["filename"]).lower()
+ )
+ return result
+
def isBertosDir(directory):
return os.path.exists(directory + "/VERSION")
def bertosVersion(directory):
return open(directory + "/VERSION").readline().strip()
-def createBertosProject(project_info):
- directory = project_info.info("PROJECT_PATH")
- sources_dir = project_info.info("SOURCES_PATH")
- if not os.path.isdir(directory):
- os.mkdir(directory)
- f = open(directory + "/project.bertos", "w")
- f.write(repr(project_info))
- f.close()
- ## Destination source dir
- srcdir = directory + "/bertos"
+def enabledModules(project_info):
+ enabled_modules = []
+ for name, module in project_info.info("MODULES").items():
+ if module["enabled"]:
+ enabled_modules.append(name)
+ return enabled_modules
+
+def presetList(directory):
+ """
+    Return the list of presets found in the selected BeRTOS version.
+ """
+ abspath = os.path.join(directory, const.PREDEFINED_BOARDS_DIR)
+ preset_list = dict([
+ (os.path.join(abspath, preset_dir), presetInfo(os.path.join(abspath, preset_dir)))
+ for preset_dir in os.listdir(os.path.join(directory, const.PREDEFINED_BOARDS_DIR))
+ ])
+ return preset_list
+
+def presetInfo(preset_dir):
+ """
+    Return the preset-relevant info contained in the project file.
+ """
+ preset_info = pickle.loads(open(os.path.join(preset_dir, "project.bertos"), "r").read())
+ try:
+ description = open(os.path.join(preset_dir, "description"), "r").read()
+ except IOError:
+ # No description file found.
+ description = ""
+ relevant_info = {
+ "CPU_NAME": preset_info.get("CPU_NAME"),
+ "SELECTED_FREQ": preset_info.get("SELECTED_FREQ"),
+ "WIZARD_VERSION": preset_info.get("WIZARD_VERSION", None),
+ "PRESET_NAME": preset_info.get("PROJECT_NAME"),
+ "PRESET_DESCRIPTION": description.decode("utf-8"),
+ }
+ return relevant_info
+
+def mergeSources(srcdir, new_sources, old_sources):
+    # The current mergeSources function provides only a raw copy of the sources in the
+ # created project.
+ #
+ # TODO: implement the three way merge algorithm
+ #
shutil.rmtree(srcdir, True)
- shutil.copytree(sources_dir + "/bertos", srcdir)
- ## Destination makefile
- makefile = directory + "/Makefile"
- if os.path.exists(makefile):
- os.remove(makefile)
- makefile = open("mktemplates/Makefile").read()
- makefile = makefileGenerator(project_info, makefile)
- open(directory + "/Makefile", "w").write(makefile)
- ## Destination project dir
- prjdir = directory + "/" + os.path.basename(directory)
- shutil.rmtree(prjdir, True)
- os.mkdir(prjdir)
- ## Destination configurations files
- cfgdir = prjdir + "/cfg"
- shutil.rmtree(cfgdir, True)
- os.mkdir(cfgdir)
- for key, value in project_info.info("CONFIGURATIONS").items():
- string = open(sources_dir + "/" + key, "r").read()
- for parameter, infos in value.items():
- value = infos["value"]
- if "type" in infos["informations"] and infos["informations"]["type"] == "autoenabled":
- value = "1"
- if "unsigned" in infos["informations"].keys() and infos["informations"]["unsigned"]:
- value += "U"
- if "long" in infos["informations"].keys() and infos["informations"]["long"]:
- value += "L"
- string = sub(string, parameter, value)
- f = open(cfgdir + "/" + os.path.basename(key), "w")
- f.write(string)
- f.close()
- ## Destinatio mk file
- makefile = open("mktemplates/template.mk", "r").read()
- makefile = mkGenerator(project_info, makefile)
- open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
- ## Destination main.c file
- main = open("srctemplates/main.c", "r").read()
- open(prjdir + "/main.c", "w").write(main)
- if "codelite" in project_info.info("OUTPUT"):
- workspace = codeliteWorkspaceGenerator(project_info)
- open(directory + "/" + os.path.basename(prjdir) + ".workspace", "w").write(workspace)
- project = codeliteProjectGenerator(project_info)
- open(directory + "/" + os.path.basename(prjdir) + ".project", "w").write(project)
-
-def mkGenerator(project_info, makefile):
+ copytree.copytree(os.path.join(new_sources, "bertos"), srcdir, ignore_list=const.IGNORE_LIST)
+
+def projectFileGenerator(project_info):
+ directory = project_info.info("PROJECT_PATH")
+ project_data = {}
+ enabled_modules = []
+ for module, information in project_info.info("MODULES").items():
+ if information["enabled"]:
+ enabled_modules.append(module)
+ project_data["ENABLED_MODULES"] = sorted(enabled_modules)
+ if project_info.info("PRESET"):
+ # For presets save again the BERTOS_PATH into project file
+ project_data["PRESET"] = True
+ project_data["BERTOS_PATH"] = relpath.relpath(project_info.info("BERTOS_PATH"), directory)
+ elif project_info.edit:
+ # If in editing mode the BERTOS_PATH is maintained
+ project_data["BERTOS_PATH"] = relpath.relpath(project_info.info("BERTOS_PATH"), directory)
+ else:
+ # Use the local BeRTOS version instead of the original one
+ # project_data["BERTOS_PATH"] = project_info.info("BERTOS_PATH")
+ project_data["BERTOS_PATH"] = "."
+ project_data["PROJECT_NAME"] = project_info.info("PROJECT_NAME", os.path.basename(directory))
+ project_src_relpath = relpath.relpath(project_info.info("PROJECT_SRC_PATH"), directory)
+ project_data["PROJECT_SRC_PATH"] = project_src_relpath
+ project_data["PROJECT_SRC_PATH_FROM_MAKEFILE"] = project_info.info("PROJECT_SRC_PATH_FROM_MAKEFILE")
+ project_data["TOOLCHAIN"] = {'path': project_info.info("TOOLCHAIN")['path']}
+ project_data["CPU_NAME"] = project_info.info("CPU_NAME")
+ project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
+ project_data["OUTPUT"] = project_info.info("OUTPUT")
+ project_data["WIZARD_VERSION"] = WIZARD_VERSION
+ project_data["PRESET"] = project_info.info("PRESET")
+ project_data["PROJECT_HW_PATH"] = relpath.relpath(project_info.info("PROJECT_HW_PATH"), directory)
+ project_data["PROJECT_HW_PATH_FROM_MAKEFILE"] = project_info.info("PROJECT_HW_PATH_FROM_MAKEFILE")
+ return pickle.dumps(project_data)
+
+def loadPlugin(plugin):
"""
- Generates the mk file for the current project.
+ Returns the given plugin module.
"""
+ return getattr(__import__("plugins", {}, {}, [plugin]), plugin)
+
+def versionFileGenerator(project_info, version_file):
+ version = bertosVersion(project_info.info("BERTOS_PATH"))
+ return version_file.replace('$version', version)
+
+def userMkGeneratorFromPreset(project_info):
+ project_name = project_info.info("PROJECT_NAME")
+ preset_path = project_info.info("PRESET_PATH")
+ preset_name = project_info.info("PRESET_NAME")
+ preset_src_dir = project_info.info("PRESET_SRC_PATH")
+ makefile = open(os.path.join(preset_path, preset_src_dir, "%s_user.mk" %preset_name), 'r').read()
+ destination = os.path.join(project_info.prjdir, "%s_user.mk" %project_info.info("PROJECT_NAME"))
+ # Temporary code.
+ # TODO: write it using regular expressions to secure this function
+ makefile = makefile.replace(preset_name + "_", project_name + "_")
+ open(destination, "w").write(makefile)
+
+def userMkGenerator(project_info):
+ makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template_user.mk"), "r").read()
+ destination = os.path.join(project_info.prjdir, os.path.basename(project_info.prjdir) + "_user.mk")
+    # Deadly performance loss was here :(
mk_data = {}
mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
- mk_data["$cpuname"] = project_info.info("CPU_INFOS")["CORE_CPU"]
- mk_data["$cflags"] = " ".join(project_info.info("CPU_INFOS")["C_FLAGS"])
- mk_data["$ldflags"] = " ".join(project_info.info("CPU_INFOS")["LD_FLAGS"])
- mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$constants"] = csrcGenerator(project_info)
- mk_data["$prefix"] = project_info.info("TOOLCHAIN")["path"].split("gcc")[0]
- mk_data["$suffix"] = project_info.info("TOOLCHAIN")["path"].split("gcc")[1]
- mk_data["$cross"] = project_info.info("TOOLCHAIN")["path"].split("gcc")[0]
- mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
+ mk_data["$ppath"] = project_info.info("PROJECT_SRC_PATH_FROM_MAKEFILE")
+ mk_data["$main"] = "/".join(["$(%s_SRC_PATH)" %project_info.info("PROJECT_NAME"), "main.c"])
+ for key in mk_data:
+ makefile = makefile.replace(key, mk_data[key])
+ open(destination, "w").write(makefile)
+
+def mkGenerator(project_info):
+ """
+ Generates the mk file for the current project.
+ """
+ makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template.mk"), "r").read()
+ prjdir = os.path.abspath(project_info.prjdir)
+ destination = os.path.join(prjdir, os.path.basename(prjdir) + ".mk")
+ mk_data = {}
+ mk_data["$pname"] = project_info.info("PROJECT_NAME")
+ mk_data["$ppath"] = project_info.info("PROJECT_SRC_PATH_FROM_MAKEFILE")
+ mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ")
+ cpu_mk_parameters = []
+ for key, value in project_info.info("CPU_INFOS").items():
+ if key.startswith(const.MK_PARAM_ID):
+ cpu_mk_parameters.append("%s = %s" %(key.replace("MK", mk_data["$pname"]), value))
+ mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters)
+ mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info)
+ mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].rsplit("gcc", 1)[0])
+ mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].rsplit("gcc", 1)[1])
+ mk_data["$hwpath"] = project_info.info("PROJECT_HW_PATH_FROM_MAKEFILE")
for key in mk_data:
- while makefile.find(key) != -1:
- makefile = makefile.replace(key, mk_data[key])
- return makefile
+ makefile = makefile.replace(key, mk_data[key])
+ open(destination, "w").write(makefile)
-def makefileGenerator(project_info, makefile):
+def makefileGenerator(project_info):
"""
Generate the Makefile for the current project.
"""
- # TODO: write a general function that works for both the mk file and the Makefile
- while makefile.find("project_name") != -1:
- makefile = makefile.replace("project_name", os.path.basename(project_info.info("PROJECT_PATH")))
- return makefile
+ makefile = open(os.path.join(const.DATA_DIR, "mktemplates/Makefile"), "r").read()
+ destination = os.path.join(project_info.maindir, "Makefile")
+ # TODO write a general function that works for both the mk file and the Makefile
+ mk_data = {}
+ mk_data["$pname"] = project_info.info("PROJECT_NAME")
+ mk_data["$ppath"] = project_info.info("PROJECT_SRC_PATH_FROM_MAKEFILE")
+ for key in mk_data:
+ makefile = makefile.replace(key, mk_data[key])
+ open(destination, "w").write(makefile)
def csrcGenerator(project_info):
modules = project_info.info("MODULES")
harvard = True
else:
harvard = False
- ## file to be included in CSRC variable
+ # file to be included in CSRC variable
csrc = []
- ## file to be included in PCSRC variable
+ # file to be included in PCSRC variable
pcsrc = []
- ## constants to be included at the beginning of the makefile
+ # files to be included in CPPASRC variable
+ cppasrc = []
+ # files to be included in CXXSRC variable
+ cxxsrc = []
+ # files to be included in ASRC variable
+ asrc = []
+ # constants to be included at the beginning of the makefile
constants = {}
- module_files = set([])
- dependency_files = set([])
- ## assembly sources
- asm_files = set([])
for module, information in modules.items():
+ module_files = set([])
+ dependency_files = set([])
+ # assembly sources
+ asm_files = set([])
+ hwdir = os.path.basename(project_info.info("PROJECT_PATH")) + "/hw"
if information["enabled"]:
if "constants" in information:
constants.update(information["constants"])
cfiles, sfiles = findModuleFiles(module, project_info)
module_files |= set(cfiles)
asm_files |= set(sfiles)
- for file_dependency in information["depends"]:
- if file_dependency in files:
+ for file in information["hw"]:
+ if file.endswith(".c"):
+ module_files |= set([hwdir + "/" + os.path.basename(file)])
+ for file_dependency in information["depends"] + tuple(files.keys()):
dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info)
dependency_files |= set(dependencyCFiles)
asm_files |= set(dependencySFiles)
for file in module_files:
- if not harvard or "harvard" not in information or information["harvard"] == "both":
+ if not harvard or information.get("harvard", "both") == "both":
csrc.append(file)
if harvard and "harvard" in information:
pcsrc.append(file)
for file in dependency_files:
csrc.append(file)
+ for file in project_info.info("CPU_INFOS")["C_SRC"]:
+ csrc.append(file)
+ for file in project_info.info("CPU_INFOS")["PC_SRC"]:
+ pcsrc.append(file)
+ for file in asm_files:
+ cppasrc.append(file)
+ for file in project_info.info("CPU_INFOS")["CPPA_SRC"]:
+ cppasrc.append(file)
+ for file in project_info.info("CPU_INFOS")["CXX_SRC"]:
+ cxxsrc.append(file)
+ for file in project_info.info("CPU_INFOS")["ASRC"]:
+ asrc.append(file)
+ csrc = sorted(set(csrc))
csrc = " \\\n\t".join(csrc) + " \\"
+ pcsrc = sorted(set(pcsrc))
pcsrc = " \\\n\t".join(pcsrc) + " \\"
- constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + str(value) for key, value in constants.items()])
- return csrc, pcsrc, constants
-
+ cppasrc = sorted(set(cppasrc))
+ cppasrc = " \\\n\t".join(cppasrc) + " \\"
+ cxxsrc = sorted(set(cxxsrc))
+ cxxsrc = " \\\n\t".join(cxxsrc) + " \\"
+ asrc = sorted(set(asrc))
+ asrc = " \\\n\t".join(asrc) + " \\"
+ constants = "\n".join([project_info.info("PROJECT_NAME") + "_" + key + " = " + unicode(value) for key, value in constants.items()])
+ return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants
+
def findModuleFiles(module, project_info):
- ## Find the files related to the selected module
+ # Find the files related to the selected module
cfiles = []
sfiles = []
- ## .c files related to the module and the cpu architecture
- for filename, path in findDefinitions(module + ".c", project_info) + \
- findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".c", project_info):
- path = path.replace(project_info.info("SOURCES_PATH") + "/", "")
+ # .c files related to the module and the cpu architecture
+ for filename, path in project_info.searchFiles(module + ".c"):
+ path = path.replace(project_info.info("BERTOS_PATH") + os.sep, "")
+ path = replaceSeparators(path)
cfiles.append(path + "/" + filename)
- ## .s files related to the module and the cpu architecture
- for filename, path in findDefinitions(module + ".s", project_info) + \
- findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s", project_info) + \
- findDefinitions(module + ".S", project_info) + \
- findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S", project_info):
- path = path.replace(project_info.info("SOURCES_PATH") + "/", "")
+ # .s files related to the module and the cpu architecture
+ for filename, path in project_info.searchFiles(module + ".s") + \
+ project_info.searchFiles(module + ".S"):
+ path = path.replace(project_info.info("BERTOS_PATH") + os.sep, "")
+ path = replaceSeparators(path)
sfiles.append(path + "/" + filename)
- ## .c and .s files related to the module and the cpu tags
- for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
- for filename, path in findDefinitions(module + "_" + tag + ".c", project_info):
- path = path.replace(project_info.info("SOURCES_PATH") + "/", "")
+ # .c and .s files related to the module and the cpu tags
+ tags = project_info.info("CPU_INFOS")["CPU_TAGS"]
+
+ # Awful, but secure check for version
+ # TODO: split me in a method/function
+ try:
+ version_string = bertosVersion(project_info.info("BERTOS_PATH"))
+ version_list = [int(i) for i in version_string.split()[1].split('.')]
+ except ValueError:
+ # If the version file hasn't a valid version number assume it's an older
+ # project.
+ version_list = [0, 0]
+ if version_list < [2, 5]:
+ # For older versions of BeRTOS add the toolchain to the tags
+ tags.append(project_info.info("CPU_INFOS")["TOOLCHAIN"])
+
+ for tag in tags:
+ for filename, path in project_info.searchFiles(module + "_" + tag + ".c"):
+ path = path.replace(project_info.info("BERTOS_PATH") + os.sep, "")
+ if os.sep != "/":
+ path = replaceSeparators(path)
cfiles.append(path + "/" + filename)
- for filename, path in findDefinitions(module + "_" + tag + ".s", project_info) + \
- findDefinitions(module + "_" + tag + ".S", project_info):
- path = path.replace(project_info.info("SOURCES_PATH") + "/", "")
+ for filename, path in project_info.searchFiles(module + "_" + tag + ".s") + \
+ project_info.searchFiles(module + "_" + tag + ".S"):
+ path = path.replace(project_info.info("BERTOS_PATH") + os.sep, "")
+ path = replaceSeparators(path)
sfiles.append(path + "/" + filename)
return cfiles, sfiles
-def codeliteProjectGenerator(project_info):
- template = open("cltemplates/bertos.project").read()
- filelist = "\n".join(codelite_project.clFiles(codelite_project.findSources(project_info.info("PROJECT_PATH")), project_info.info("PROJECT_PATH")))
- while template.find("$filelist") != -1:
- template = template.replace("$filelist", filelist)
- project_name = os.path.basename(project_info.info("PROJECT_PATH"))
- while template.find("$project") != -1:
- template = template.replace("$project", project_name)
- return template
-
-def codeliteWorkspaceGenerator(project_info):
- template = open("cltemplates/bertos.workspace").read()
- project_name = os.path.basename(project_info.info("PROJECT_PATH"))
- while template.find("$project") != -1:
- template = template.replace("$project", project_name)
- return template
-
+def replaceSeparators(path):
+ """
+    Replace the separators in the given path with the Unix standard separator.
+ """
+ if os.sep != "/":
+ path = path.replace(os.sep, "/")
+ return path
+
def getSystemPath():
path = os.environ["PATH"]
- if os.name == "nt":
- path = path.split(";")
- else:
- path = path.split(":")
+ path = path.split(os.pathsep)
return path
+def findInPath(file, path=None):
+ if path is None:
+ path = os.environ.get('PATH', '')
+ if type(path) is type(''):
+ path = path.split(os.pathsep)
+ return filter(os.path.exists, map(lambda dir, file=file: os.path.join(dir, file), path))
+
+
def findToolchains(path_list):
toolchains = []
for element in path_list:
for toolchain in glob.glob(element+ "/" + const.GCC_NAME):
- toolchains.append(toolchain)
+ if not os.path.isdir(toolchain):
+ toolchains.append(toolchain)
return list(set(toolchains))
def getToolchainInfo(output):
info["thread"] = thread[0].split("Thread model: ")[1]
return info
-def loadSourceTree(project):
- fileList = [f for f in os.walk(project.info("SOURCES_PATH"))]
- project.setInfo("FILE_LIST", fileList)
-
-def findDefinitions(ftype, project):
- L = project.info("FILE_LIST")
- definitions = []
- for element in L:
- for filename in element[2]:
- if fnmatch.fnmatch(filename, ftype):
- definitions.append((filename, element[0]))
- return definitions
-
-def loadCpuInfos(project):
- cpuInfos = []
- for definition in findDefinitions(const.CPU_DEFINITION, project):
- cpuInfos.append(getInfos(definition))
- return cpuInfos
+def getToolchainName(toolchain_info):
+ name = "GCC " + toolchain_info["version"] + " - " + toolchain_info["target"].strip()
+ return name
+
+def getTagSet(cpu_info):
+ tag_set = set([])
+ for cpu in cpu_info:
+ tag_set |= set([cpu["CPU_NAME"]])
+ tag_set |= set(cpu["CPU_TAGS"])
+ tag_set |= set([cpu["TOOLCHAIN"]])
+ return tag_set
+
def getInfos(definition):
D = {}
elif line.find("\\brief") != -1:
module_definition["module_description"] = line[line.find("\\brief") + len("\\brief "):]
module_dict = {}
- if "module_name" in module_definition.keys():
+ if "module_name" in module_definition:
module_name = module_definition[const.MODULE_DEFINITION["module_name"]]
del module_definition[const.MODULE_DEFINITION["module_name"]]
module_dict[module_name] = {}
- if const.MODULE_DEFINITION["module_depends"] in module_definition.keys():
- if type(module_definition[const.MODULE_DEFINITION["module_depends"]]) == str:
- module_definition[const.MODULE_DEFINITION["module_depends"]] = (module_definition[const.MODULE_DEFINITION["module_depends"]],)
- module_dict[module_name]["depends"] = module_definition[const.MODULE_DEFINITION["module_depends"]]
+ if const.MODULE_DEFINITION["module_depends"] in module_definition:
+ depends = module_definition[const.MODULE_DEFINITION["module_depends"]]
del module_definition[const.MODULE_DEFINITION["module_depends"]]
+ if type(depends) == str:
+ depends = (depends,)
+ module_dict[module_name]["depends"] = depends
else:
module_dict[module_name]["depends"] = ()
- if const.MODULE_DEFINITION["module_configuration"] in module_definition.keys():
+ if const.MODULE_DEFINITION["module_configuration"] in module_definition:
module_dict[module_name]["configuration"] = module_definition[const.MODULE_DEFINITION["module_configuration"]]
del module_definition[const.MODULE_DEFINITION["module_configuration"]]
else:
module_dict[module_name]["configuration"] = ""
- if "module_description" in module_definition.keys():
+ if "module_description" in module_definition:
module_dict[module_name]["description"] = module_definition["module_description"]
del module_definition["module_description"]
- if const.MODULE_DEFINITION["module_harvard"] in module_definition.keys():
+ if const.MODULE_DEFINITION["module_harvard"] in module_definition:
harvard = module_definition[const.MODULE_DEFINITION["module_harvard"]]
- if harvard == "both" or harvard == "pgm_memory":
- module_dict[module_name]["harvard"] = harvard
+ module_dict[module_name]["harvard"] = harvard
del module_definition[const.MODULE_DEFINITION["module_harvard"]]
+ if const.MODULE_DEFINITION["module_hw"] in module_definition:
+ hw = module_definition[const.MODULE_DEFINITION["module_hw"]]
+ del module_definition[const.MODULE_DEFINITION["module_hw"]]
+ if type(hw) == str:
+ hw = (hw, )
+ module_dict[module_name]["hw"] = hw
+ else:
+ module_dict[module_name]["hw"] = ()
+ if const.MODULE_DEFINITION["module_supports"] in module_definition:
+ supports = module_definition[const.MODULE_DEFINITION["module_supports"]]
+ del module_definition[const.MODULE_DEFINITION["module_supports"]]
+ module_dict[module_name]["supports"] = supports
module_dict[module_name]["constants"] = module_definition
module_dict[module_name]["enabled"] = False
return to_be_parsed, module_dict
+def isSupported(project, module=None, property_id=None):
+ if not module and property_id:
+ item = project.info("CONFIGURATIONS")[property_id[0]][property_id[1]]["informations"]
+ else:
+ item = project.info("MODULES")[module]
+ tag_dict = project.info("ALL_CPU_TAGS")
+ if "supports" in item:
+ support_string = item["supports"]
+ supported = {}
+ try:
+ exec "supported = " + support_string in tag_dict, supported
+ except:
+ raise SupportedException(support_string)
+ return supported["supported"]
+ else:
+ return True
+
def loadDefineLists(comment_list):
define_list = {}
for comment in comment_list:
define_list[key] = (value,)
return define_list
-def getDescriptionInformations(comment):
- """
- Take the doxygen comment and strip the wizard informations, returning the tuple
- (comment, wizard_information)
+def getDescriptionInformations(comment):
+ """
+    Take the doxygen comment and strip the wizard information, returning the tuple
+ (comment, wizard_information)
"""
brief = ""
description = ""
block.append(([comment], define, start))
return block
-def loadModuleData(project):
- module_info_dict = {}
- list_info_dict = {}
- configuration_info_dict = {}
- file_dict = {}
- for filename, path in findDefinitions("*.h", project) + findDefinitions("*.c", project) + findDefinitions("*.s", project) + findDefinitions("*.S", project):
- comment_list = getCommentList(open(path + "/" + filename, "r").read())
- if len(comment_list) > 0:
- module_info = {}
- configuration_info = {}
- try:
- to_be_parsed, module_dict = loadModuleDefinition(comment_list[0])
- except ParseError, err:
- raise DefineException.ModuleDefineException(path, err.line_number, err.line)
- for module, information in module_dict.items():
- information["category"] = os.path.basename(path)
- if "configuration" in information.keys() and len(information["configuration"]):
- configuration = module_dict[module]["configuration"]
- try:
- configuration_info[configuration] = loadConfigurationInfos(project.info("SOURCES_PATH") + "/" + configuration)
- except ParseError, err:
- raise DefineException.ConfigurationDefineException(project.info("SOURCES_PATH") + "/" + configuration, err.line_number, err.line)
- module_info_dict.update(module_dict)
- configuration_info_dict.update(configuration_info)
- if to_be_parsed:
- try:
- list_dict = loadDefineLists(comment_list[1:])
- list_info_dict.update(list_dict)
- except ParseError, err:
- raise DefineException.EnumDefineException(path, err.line_number, err.line)
- for filename, path in findDefinitions("*_" + project.info("CPU_INFOS")["TOOLCHAIN"] + ".h", project):
- comment_list = getCommentList(open(path + "/" + filename, "r").read())
- list_info_dict.update(loadDefineLists(comment_list))
- for tag in project.info("CPU_INFOS")["CPU_TAGS"]:
- for filename, path in findDefinitions("*_" + tag + ".h", project):
- comment_list = getCommentList(open(path + "/" + filename, "r").read())
- list_info_dict.update(loadDefineLists(comment_list))
- project.setInfo("MODULES", module_info_dict)
- project.setInfo("LISTS", list_info_dict)
- project.setInfo("CONFIGURATIONS", configuration_info_dict)
- project.setInfo("FILES", file_dict)
-
def formatParamNameValue(text):
"""
Take the given string and return a tuple with the name of the parameter in the first position
"long": boolean indicating if the num is a long
"unsigned": boolean indicating if the num is an unsigned
"value_list": the name of the enum for enum parameters
+ "conditional_deps": the list of conditional dependencies for boolean parameters
"""
configuration_infos = {}
+ configuration_infos["paramlist"] = []
for comment, define, start in getDefinitionBlocks(open(path, "r").read()):
name, value = formatParamNameValue(define)
brief, description, informations = getDescriptionInformations(comment)
+ configuration_infos["paramlist"].append((start, name))
configuration_infos[name] = {}
configuration_infos[name]["value"] = value
configuration_infos[name]["informations"] = informations
if not "type" in configuration_infos[name]["informations"]:
configuration_infos[name]["informations"]["type"] = findParameterType(configuration_infos[name])
- if ("type" in configuration_infos[name]["informations"].keys() and
+ if ("type" in configuration_infos[name]["informations"] and
configuration_infos[name]["informations"]["type"] == "int" and
configuration_infos[name]["value"].find("L") != -1):
configuration_infos[name]["informations"]["long"] = True
configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("L", "")
- if ("type" in configuration_infos[name]["informations"].keys() and
+ if ("type" in configuration_infos[name]["informations"] and
configuration_infos[name]["informations"]["type"] == "int" and
configuration_infos[name]["value"].find("U") != -1):
configuration_infos[name]["informations"]["unsigned"] = True
configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("U", "")
+ if "conditional_deps" in configuration_infos[name]["informations"]:
+ if (type(configuration_infos[name]["informations"]["conditional_deps"]) == str or
+ type(configuration_infos[name]["informations"]["conditional_deps"]) == unicode):
+ configuration_infos[name]["informations"]["conditional_deps"] = (configuration_infos[name]["informations"]["conditional_deps"], )
+ elif type(configuration_infos[name]["informations"]["conditional_deps"]) == tuple:
+ pass
+ else:
+ configuration_infos[name]["informations"]["conditional_deps"] = ()
configuration_infos[name]["description"] = description
configuration_infos[name]["brief"] = brief
return configuration_infos
+def updateConfigurationValues(def_conf, user_conf):
+ for param in def_conf["paramlist"]:
+ if param[1] in user_conf and "value" in user_conf[param[1]]:
+ def_conf[param[1]]["value"] = user_conf[param[1]]["value"]
+ return def_conf
+
def findParameterType(parameter):
if "value_list" in parameter["informations"]:
return "enum"
"""
Return True if the value is a simple int.
"""
- if ("long" not in informatios.keys() or not informations["long"]) and ("unsigned" not in informations.keys() or informations["unsigned"]):
+ if ("long" not in informatios or not informations["long"]) and ("unsigned" not in informations or informations["unsigned"]):
return True
else:
return False
"""
Return True if the value is a long.
"""
- if "long" in informations.keys() and informations["long"] and "unsigned" not in informations.keys():
+ if "long" in informations and informations["long"] and "unsigned" not in informations:
return True
else:
return False
"""
Return True if the value is an unsigned.
"""
- if "unsigned" in informations.keys() and informations["unsigned"] and "long" not in informations.keys():
+ if "unsigned" in informations and informations["unsigned"] and "long" not in informations:
return True
else:
return False
"""
Return True if the value is an unsigned long.
"""
- if "unsigned" in informations.keys() and "long" in informations.keys() and informations["unsigned"] and informations["long"]:
+ if "unsigned" in informations and "long" in informations and informations["unsigned"] and informations["long"]:
return True
else:
return False
Exception.__init__(self)
self.line_number = line_number
self.line = line
+
+class SupportedException(Exception):
+ def __init__(self, support_string):
+ Exception.__init__(self)
+ self.support_string = support_string