import const
import plugins
import DefineException
-import BProject
+
+from _wizard_version import WIZARD_VERSION
+
+from LoadException import VersionException, ToolchainException
def isBertosDir(directory):
return os.path.exists(directory + "/VERSION")
def bertosVersion(directory):
return open(directory + "/VERSION").readline().strip()
-def loadBertosProject(project_file):
- project_data = pickle.loads(open(project_file, "r").read())
- project_info = BProject.BProject()
- project_info.setInfo("PROJECT_PATH", os.path.dirname(project_file))
- project_info.setInfo("SOURCES_PATH", project_data["SOURCES_PATH"])
- project_info.setInfo("TOOLCHAIN", project_data["TOOLCHAIN"])
- project_info.setInfo("SELECTED_FREQ", project_data["SELECTED_FREQ"])
- project_info.setInfo("OUTPUT", project_data["OUTPUT"])
- loadSourceTree(project_info)
- cpu_name = project_data["CPU_NAME"]
- project_info.setInfo("CPU_NAME", cpu_name)
- cpu_info = loadCpuInfos(project_info)
- for cpu in cpu_info:
- if cpu["CPU_NAME"] == cpu_name:
- project_info.setInfo("CPU_INFOS", cpu)
- break
- tag_list = getTagSet(cpu_info)
- # Create, fill and store the dict with the tags
- tag_dict = {}
- for element in tag_list:
- tag_dict[element] = False
- infos = project_info.info("CPU_INFOS")
- for tag in tag_dict:
- if tag in infos["CPU_TAGS"] + [infos["CPU_NAME"], infos["TOOLCHAIN"]]:
- tag_dict[tag] = True
- else:
- tag_dict[tag] = False
- project_info.setInfo("ALL_CPU_TAGS", tag_dict)
- loadModuleData(project_info, True)
- setEnabledModules(project_info, project_data["ENABLED_MODULES"])
- return project_info
-
def setEnabledModules(project_info, enabled_modules):
modules = project_info.info("MODULES")
files = {}
for module, information in modules.items():
information["enabled"] = module in enabled_modules
- for dependency in information["depends"]:
- if not dependency in modules:
- if dependency in files:
- files[dependency] += 1
- else:
- files[dependency] = 1
+ if information["enabled"]:
+ for dependency in information["depends"]:
+ if not dependency in modules:
+ files[dependency] = files.get(dependency, 0) + 1
project_info.setInfo("MODULES", modules)
project_info.setInfo("FILES", files)
enabled_modules.append(name)
return enabled_modules
+def presetList(directory):
+    """
+    Return the presets found in the selected BeRTOS version.
+
+    Maps the absolute path of each preset directory (every entry of the
+    predefined-boards directory) to its preset info dict (see presetInfo).
+    """
+    abspath = os.path.join(directory, const.PREDEFINED_BOARDS_DIR)
+    # Reuse the precomputed abspath instead of re-joining the same path
+    # for the listdir call.
+    preset_list = dict([
+        (os.path.join(abspath, preset_dir), presetInfo(os.path.join(abspath, preset_dir)))
+        for preset_dir in os.listdir(abspath)
+    ])
+    return preset_list
+
+def presetInfo(preset_dir):
+    """
+    Return the preset-relevant info contained in the preset's project file.
+
+    Reads <preset_dir>/project.bertos (a pickled dict) and the optional
+    <preset_dir>/description file, and returns a dict with the CPU name,
+    selected frequency, wizard version, preset name and description.
+    """
+    # NOTE: pickle is only safe on trusted data; preset files ship with the
+    # BeRTOS sources, so they are assumed trusted here.
+    project_file = open(os.path.join(preset_dir, "project.bertos"), "r")
+    try:
+        preset_info = pickle.loads(project_file.read())
+    finally:
+        # Close explicitly instead of leaking the handle.
+        project_file.close()
+    try:
+        description_file = open(os.path.join(preset_dir, "description"), "r")
+        try:
+            description = description_file.read()
+        finally:
+            description_file.close()
+    except IOError:
+        # No description file found: fall back to an empty description.
+        description = ""
+    relevant_info = {
+        "CPU_NAME": preset_info.get("CPU_NAME"),
+        "SELECTED_FREQ": preset_info.get("SELECTED_FREQ"),
+        # Old project files predate WIZARD_VERSION; None marks "unknown".
+        "WIZARD_VERSION": preset_info.get("WIZARD_VERSION", None),
+        "PRESET_NAME": preset_info.get("PROJECT_NAME"),
+        "PRESET_DESCRIPTION": description.decode("utf-8"),
+    }
+    return relevant_info
+
def mergeSources(srcdir, new_sources, old_sources):
    # The current mergeSources function provide only a raw copy of the sources in the
    # created project.
    # TODO: implement the three way merge algorithm
    #
    shutil.rmtree(srcdir, True)
-    copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
+    # Wipe any stale copy first, then copy the whole bertos/ tree from the
+    # new sources; old_sources stays unused until the three-way merge is
+    # implemented (presumably it will supply the merge ancestor -- TODO confirm).
+    copytree.copytree(os.path.join(new_sources, "bertos"), srcdir, ignore_list=const.IGNORE_LIST)
def projectFileGenerator(project_info):
directory = project_info.info("PROJECT_PATH")
if information["enabled"]:
enabled_modules.append(module)
project_data["ENABLED_MODULES"] = enabled_modules
- project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
+ # Use the local BeRTOS version instead of the original one
+ # project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
+ project_data["SOURCES_PATH"] = directory
+ project_data["PROJECT_NAME"] = project_info.info("PROJECT_NAME", os.path.basename(directory))
project_data["TOOLCHAIN"] = project_info.info("TOOLCHAIN")
project_data["CPU_NAME"] = project_info.info("CPU_NAME")
project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
project_data["OUTPUT"] = project_info.info("OUTPUT")
+ project_data["WIZARD_VERSION"] = WIZARD_VERSION
return pickle.dumps(project_data)
def createBertosProject(project_info, edit=False):
f = open(directory + "/project.bertos", "w")
f.write(projectFileGenerator(project_info))
f.close()
+ # VERSION file
+ version_file = open(os.path.join(const.DATA_DIR, "vtemplates/VERSION"), "r").read()
+ open(directory + "/VERSION", "w").write(versionFileGenerator(project_info, version_file))
# Destination source dir
srcdir = directory + "/bertos"
if not edit:
mergeSources(srcdir, sources_dir, old_sources_dir)
# Destination makefile
makefile = directory + "/Makefile"
- makefile = open("mktemplates/Makefile").read()
+ makefile = open(os.path.join(const.DATA_DIR, "mktemplates/Makefile"), 'r').read()
makefile = makefileGenerator(project_info, makefile)
open(directory + "/Makefile", "w").write(makefile)
# Destination project dir
- prjdir = directory + "/" + os.path.basename(directory)
+ # prjdir = directory + "/" + os.path.basename(directory)
+ prjdir = os.path.join(directory, project_info.info("PROJECT_NAME"))
if not edit:
shutil.rmtree(prjdir, True)
os.mkdir(prjdir)
f.close()
if not edit:
# Destination user mk file (only on project creation)
- makefile = open("mktemplates/template.mk", "r").read()
- makefile = mkGenerator(project_info, makefile)
+ makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template.mk"), "r").read()
+ # Deadly performances loss was here :(
+ makefile = userMkGenerator(project_info, makefile)
open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
# Destination wizard mk file
- makefile = open("mktemplates/template_wiz.mk", "r").read()
+ makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template_wiz.mk"), "r").read()
makefile = mkGenerator(project_info, makefile)
open(prjdir + "/" + os.path.basename(prjdir) + "_wiz.mk", "w").write(makefile)
# Destination main.c file
if not edit:
- main = open("srctemplates/main.c", "r").read()
+ main = open(os.path.join(const.DATA_DIR, "srctemplates/main.c"), "r").read()
open(prjdir + "/main.c", "w").write(main)
# Files for selected plugins
relevants_files = {}
Returns the given plugin module.
"""
return getattr(__import__("plugins", {}, {}, [plugin]), plugin)
-
+
+def versionFileGenerator(project_info, version_file):
+    """
+    Fill the given VERSION template with the version string read from the
+    project's BeRTOS sources and return the resulting text.
+    """
+    return version_file.replace('$version', bertosVersion(project_info.info("SOURCES_PATH")))
+
+def userMkGenerator(project_info, makefile):
+    """
+    Generate the user mk file content for the current project.
+
+    Substitutes the $pname and $main placeholders in the given template with
+    the project name (basename of the project path) and the main.c path.
+    """
+    project_name = os.path.basename(project_info.info("PROJECT_PATH"))
+    mk_data = {
+        "$pname": project_name,
+        "$main": project_name + "/main.c",
+    }
+    # str.replace already substitutes every occurrence, so the old
+    # find()/while loop was redundant -- and could spin forever if a
+    # replacement value ever contained its own key.
+    for key, value in mk_data.items():
+        makefile = makefile.replace(key, value)
+    return makefile
+
def mkGenerator(project_info, makefile):
"""
Generates the mk file for the current project.
for file in information["hw"]:
if file.endswith(".c"):
module_files |= set([hwdir + "/" + os.path.basename(file)])
- for file_dependency in information["depends"]:
- if file_dependency in files:
+ for file_dependency in information["depends"] + tuple(files.keys()):
dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info)
dependency_files |= set(dependencyCFiles)
asm_files |= set(dependencySFiles)
cxxsrc.append(file)
for file in project_info.info("CPU_INFOS")["ASRC"]:
asrc.append(file)
+ csrc = set(csrc)
csrc = " \\\n\t".join(csrc) + " \\"
+ pcsrc = set(pcsrc)
pcsrc = " \\\n\t".join(pcsrc) + " \\"
+ cppasrc = set(cppasrc)
cppasrc = " \\\n\t".join(cppasrc) + " \\"
+ cxxsrc = set(cxxsrc)
cxxsrc = " \\\n\t".join(cxxsrc) + " \\"
+ asrc = set(asrc)
asrc = " \\\n\t".join(asrc) + " \\"
constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + unicode(value) for key, value in constants.items()])
return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants
cfiles = []
sfiles = []
# .c files related to the module and the cpu architecture
- for filename, path in findDefinitions(module + ".c", project_info) + \
- findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".c", project_info):
+ for filename, path in project_info.searchFiles(module + ".c"):
path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
path = replaceSeparators(path)
cfiles.append(path + "/" + filename)
# .s files related to the module and the cpu architecture
- for filename, path in findDefinitions(module + ".s", project_info) + \
- findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s", project_info) + \
- findDefinitions(module + ".S", project_info) + \
- findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S", project_info):
+ for filename, path in project_info.searchFiles(module + ".s") + \
+ project_info.searchFiles(module + ".S"):
path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
path = replaceSeparators(path)
sfiles.append(path + "/" + filename)
# .c and .s files related to the module and the cpu tags
- for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
- for filename, path in findDefinitions(module + "_" + tag + ".c", project_info):
+ tags = project_info.info("CPU_INFOS")["CPU_TAGS"]
+
+ # Awful, but secure check for version
+ # TODO: split me in a method/function
+ try:
+ version_string = bertosVersion(project_info.info("SOURCES_PATH"))
+ version_list = [int(i) for i in version_string.split()[-1].split('.')]
+ if version_list < [2, 5]:
+ # For older versions of BeRTOS add the toolchain to the tags
+ tags.append(project_info.info("CPU_INFOS")["TOOLCHAIN"])
+ except ValueError:
+ # If the version file hasn't a valid version number do nothing
+ pass
+
+ for tag in tags:
+ for filename, path in project_info.searchFiles(module + "_" + tag + ".c"):
path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
if os.sep != "/":
path = replaceSeparators(path)
cfiles.append(path + "/" + filename)
- for filename, path in findDefinitions(module + "_" + tag + ".s", project_info) + \
- findDefinitions(module + "_" + tag + ".S", project_info):
+ for filename, path in project_info.searchFiles(module + "_" + tag + ".s") + \
+ project_info.searchFiles(module + "_" + tag + ".S"):
path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
path = replaceSeparators(path)
sfiles.append(path + "/" + filename)
name = "GCC " + toolchain_info["version"] + " - " + toolchain_info["target"].strip()
return name
-def loadSourceTree(project):
- fileList = [f for f in os.walk(project.info("SOURCES_PATH"))]
- project.setInfo("FILE_LIST", fileList)
-
-def findDefinitions(ftype, project):
- L = project.info("FILE_LIST")
- definitions = []
- for element in L:
- for filename in element[2]:
- if fnmatch.fnmatch(filename, ftype):
- definitions.append((filename, element[0]))
- return definitions
-
-def loadCpuInfos(project):
- cpuInfos = []
- for definition in findDefinitions(const.CPU_DEFINITION, project):
- cpuInfos.append(getInfos(definition))
- return cpuInfos
-
def getTagSet(cpu_info):
tag_set = set([])
for cpu in cpu_info:
block.append(([comment], define, start))
return block
-def loadModuleData(project, edit=False):
- module_info_dict = {}
- list_info_dict = {}
- configuration_info_dict = {}
- file_dict = {}
- for filename, path in findDefinitions("*.h", project) + findDefinitions("*.c", project) + findDefinitions("*.s", project) + findDefinitions("*.S", project):
- comment_list = getCommentList(open(path + "/" + filename, "r").read())
- if len(comment_list) > 0:
- module_info = {}
- configuration_info = {}
- try:
- to_be_parsed, module_dict = loadModuleDefinition(comment_list[0])
- except ParseError, err:
- raise DefineException.ModuleDefineException(path, err.line_number, err.line)
- for module, information in module_dict.items():
- if "depends" not in information:
- information["depends"] = ()
- information["depends"] += (filename.split(".")[0],)
- information["category"] = os.path.basename(path)
- if "configuration" in information and len(information["configuration"]):
- configuration = module_dict[module]["configuration"]
- try:
- configuration_info[configuration] = loadConfigurationInfos(project.info("SOURCES_PATH") + "/" + configuration)
- except ParseError, err:
- raise DefineException.ConfigurationDefineException(project.info("SOURCES_PATH") + "/" + configuration, err.line_number, err.line)
- if edit:
- try:
- path = os.path.basename(project.info("PROJECT_PATH"))
- directory = project.info("PROJECT_PATH")
- user_configuration = loadConfigurationInfos(directory + "/" + configuration.replace("bertos", path))
- configuration_info[configuration] = updateConfigurationValues(configuration_info[configuration], user_configuration)
- except ParseError, err:
- raise DefineException.ConfigurationDefineException(directory + "/" + configuration.replace("bertos", path))
- module_info_dict.update(module_dict)
- configuration_info_dict.update(configuration_info)
- if to_be_parsed:
- try:
- list_dict = loadDefineLists(comment_list[1:])
- list_info_dict.update(list_dict)
- except ParseError, err:
- raise DefineException.EnumDefineException(path, err.line_number, err.line)
- for filename, path in findDefinitions("*_" + project.info("CPU_INFOS")["TOOLCHAIN"] + ".h", project):
- comment_list = getCommentList(open(path + "/" + filename, "r").read())
- list_info_dict.update(loadDefineLists(comment_list))
- for tag in project.info("CPU_INFOS")["CPU_TAGS"]:
- for filename, path in findDefinitions("*_" + tag + ".h", project):
- comment_list = getCommentList(open(path + "/" + filename, "r").read())
- list_info_dict.update(loadDefineLists(comment_list))
- project.setInfo("MODULES", module_info_dict)
- project.setInfo("LISTS", list_info_dict)
- project.setInfo("CONFIGURATIONS", configuration_info_dict)
- project.setInfo("FILES", file_dict)
-
def formatParamNameValue(text):
"""
Take the given string and return a tuple with the name of the parameter in the first position
"long": boolean indicating if the num is a long
"unsigned": boolean indicating if the num is an unsigned
"value_list": the name of the enum for enum parameters
+ "conditional_deps": the list of conditional dependencies for boolean parameters
"""
configuration_infos = {}
configuration_infos["paramlist"] = []
configuration_infos[name]["value"].find("U") != -1):
configuration_infos[name]["informations"]["unsigned"] = True
configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("U", "")
+ if "conditional_deps" in configuration_infos[name]["informations"]:
+ if (type(configuration_infos[name]["informations"]["conditional_deps"]) == str or
+ type(configuration_infos[name]["informations"]["conditional_deps"]) == unicode):
+ configuration_infos[name]["informations"]["conditional_deps"] = (configuration_infos[name]["informations"]["conditional_deps"], )
+ elif type(configuration_infos[name]["informations"]["conditional_deps"]) == tuple:
+ pass
+ else:
+ configuration_infos[name]["informations"]["conditional_deps"] = ()
configuration_infos[name]["description"] = description
configuration_infos[name]["brief"] = brief
return configuration_infos