def loadBertosProject(project_file):
    """
    Load a saved wizard project (.bertos file) and rebuild its BProject.

    Restores the saved settings (sources path, toolchain, frequency,
    output plugins, cpu), reloads the source tree, the cpu information
    and the module data, then re-enables the saved modules.
    Returns the populated BProject instance.
    """
    # NOTE(review): unpickling executes arbitrary code if the file is
    # untrusted; acceptable only because .bertos files are produced by the
    # wizard itself.
    project_fd = open(project_file, "r")
    try:
        project_data = pickle.loads(project_fd.read())
    finally:
        # The old code leaked this file handle
        project_fd.close()
    project_info = BProject.BProject()
    project_info.setInfo("PROJECT_PATH", os.path.dirname(project_file))
    project_info.setInfo("SOURCES_PATH", project_data["SOURCES_PATH"])
    project_info.setInfo("TOOLCHAIN", project_data["TOOLCHAIN"])
    project_info.setInfo("SELECTED_FREQ", project_data["SELECTED_FREQ"])
    project_info.setInfo("OUTPUT", project_data["OUTPUT"])
    loadSourceTree(project_info)
    cpu_name = project_data["CPU_NAME"]
    project_info.setInfo("CPU_NAME", cpu_name)
    cpu_info = loadCpuInfos(project_info)
    for cpu in cpu_info:
        if cpu["CPU_NAME"] == cpu_name:
            project_info.setInfo("CPU_INFOS", cpu)
            break
    # Create, fill and store the dict with the tags: a tag is active when it
    # belongs to the selected cpu (its tag list, its name or its toolchain).
    # The old code filled the dict with False and then overwrote every entry
    # in a second pass; one pass is enough.
    infos = project_info.info("CPU_INFOS")
    active_tags = infos["CPU_TAGS"] + [infos["CPU_NAME"], infos["TOOLCHAIN"]]
    tag_dict = {}
    for tag in getTagSet(cpu_info):
        tag_dict[tag] = tag in active_tags
    project_info.setInfo("ALL_CPU_TAGS", tag_dict)
    loadModuleData(project_info, True)
    setEnabledModules(project_info, project_data["ENABLED_MODULES"])
    return project_info
+
def setEnabledModules(project_info, enabled_modules):
    """
    Flag each module of the project as enabled or disabled.

    A module is marked enabled exactly when its name appears in
    enabled_modules; the updated table is stored back on project_info.
    """
    module_table = project_info.info("MODULES")
    for name in module_table:
        module_table[name]["enabled"] = name in enabled_modules
    project_info.setInfo("MODULES", module_table)
+
def enabledModules(project_info):
    """
    Return the list of names of the currently enabled modules.
    """
    return [name
            for name, module in project_info.info("MODULES").items()
            if module["enabled"]]
+
def mergeSources(srcdir, new_sources, old_sources):
    """
    Update the bertos sources of an edited project.

    Currently this is a raw overwrite: srcdir is wiped and the sources
    found under new_sources/bertos are copied in.  old_sources is accepted
    for the future three-way merge but is not used yet.

    TODO: implement the three way merge algorithm.
    """
    shutil.rmtree(srcdir, True)
    # BUG FIX: the old code referenced an undefined name "sources_dir",
    # raising NameError whenever this function was called; the parameter
    # new_sources is what was meant.
    copytree.copytree(new_sources + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
+
def projectFileGenerator(project_info):
    """
    Serialize the savable state of the project to a pickled string.

    Only the settings needed to reload the project are saved: the names of
    the enabled modules, the sources path, the toolchain, the cpu name,
    the selected frequency and the output plugins.  Returns the pickle
    bytes ready to be written to the project file.
    """
    project_data = {}
    # Save only the module names, not the whole module table
    project_data["ENABLED_MODULES"] = [module
                                       for module, information in project_info.info("MODULES").items()
                                       if information["enabled"]]
    # The remaining settings are copied verbatim from the project
    for key in ("SOURCES_PATH", "TOOLCHAIN", "CPU_NAME", "SELECTED_FREQ", "OUTPUT"):
        project_data[key] = project_info.info(key)
    return pickle.dumps(project_data)
+
def createBertosProject(project_info, edit=False):
    """
    Create a BeRTOS project on disk, or update it when edit is True.

    Creation wipes and rebuilds the whole project directory; editing
    preserves the user-owned files (already existing hw files, the user
    .mk file, main.c) and regenerates everything else: project file,
    bertos sources, Makefile, wizard .mk file and configuration files.
    Finally the selected output plugins are run and their relevant files
    are recorded on the project under RELEVANT_FILES.
    """
    directory = project_info.info("PROJECT_PATH")
    sources_dir = project_info.info("SOURCES_PATH")
    old_sources_dir = project_info.info("OLD_SOURCES_PATH")
    if not edit:
        # Start from an empty project directory
        if os.path.isdir(directory):
            shutil.rmtree(directory, True)
        os.makedirs(directory)
    # Write the project file
    f = open(directory + "/project.bertos", "w")
    f.write(projectFileGenerator(project_info))
    f.close()
    # Destination source dir
    srcdir = directory + "/bertos"
    if not edit:
        # If not in editing mode it copies all the bertos sources in the /bertos subdirectory of the project
        shutil.rmtree(srcdir, True)
        copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
    elif old_sources_dir:
        # If in editing mode it merges the current bertos sources with the selected ones
        mergeSources(srcdir, sources_dir, old_sources_dir)
    # Destination makefile.  BUG FIX: the old code assigned the Makefile
    # path to "makefile" and immediately overwrote the variable with the
    # template content; the dead assignment is gone.
    makefile = open("mktemplates/Makefile").read()
    makefile = makefileGenerator(project_info, makefile)
    f = open(directory + "/Makefile", "w")
    f.write(makefile)
    f.close()
    # Destination project dir (named after the project directory itself)
    prjdir = directory + "/" + os.path.basename(directory)
    if not edit:
        shutil.rmtree(prjdir, True)
        os.mkdir(prjdir)
    # Destination hw files
    hwdir = prjdir + "/hw"
    if not edit:
        shutil.rmtree(hwdir, True)
        os.mkdir(hwdir)
    # Copy all the hw files
    for module, information in project_info.info("MODULES").items():
        for hwfile in information["hw"]:
            string = open(sources_dir + "/" + hwfile, "r").read()
            hwfile_path = hwdir + "/" + os.path.basename(hwfile)
            if not edit or not os.path.exists(hwfile_path):
                # If not in editing mode it copies all the hw files. If in
                # editing mode it copies only the files that don't exist yet
                # (existing ones belong to the user).
                f = open(hwfile_path, "w")
                f.write(string)
                f.close()
    # Destination configurations files
    cfgdir = prjdir + "/cfg"
    if not edit:
        shutil.rmtree(cfgdir, True)
        os.mkdir(cfgdir)
    # Set properly the autoenabled parameters
    for module, information in project_info.info("MODULES").items():
        if "configuration" in information and information["configuration"] != "":
            configurations = project_info.info("CONFIGURATIONS")
            configuration = configurations[information["configuration"]]
            for start, parameter in configuration["paramlist"]:
                if "type" in configuration[parameter]["informations"] and configuration[parameter]["informations"]["type"] == "autoenabled":
                    # An autoenabled parameter mirrors the enabled state of its module
                    configuration[parameter]["value"] = "1" if information["enabled"] else "0"
            project_info.setInfo("CONFIGURATIONS", configurations)
    # Copy all the configuration files
    for configuration, information in project_info.info("CONFIGURATIONS").items():
        string = open(sources_dir + "/" + configuration, "r").read()
        for start, parameter in information["paramlist"]:
            infos = information[parameter]
            value = infos["value"]
            # Append the C literal suffixes for unsigned/long values
            if "unsigned" in infos["informations"] and infos["informations"]["unsigned"]:
                value += "U"
            if "long" in infos["informations"] and infos["informations"]["long"]:
                value += "L"
            string = sub(string, parameter, value)
        f = open(cfgdir + "/" + os.path.basename(configuration), "w")
        f.write(string)
        f.close()
    if not edit:
        # Destination user mk file (only on project creation)
        makefile = open("mktemplates/template.mk", "r").read()
        makefile = mkGenerator(project_info, makefile)
        f = open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w")
        f.write(makefile)
        f.close()
    # Destination wizard mk file
    makefile = open("mktemplates/template_wiz.mk", "r").read()
    makefile = mkGenerator(project_info, makefile)
    f = open(prjdir + "/" + os.path.basename(prjdir) + "_wiz.mk", "w")
    f.write(makefile)
    f.close()
    # Destination main.c file
    if not edit:
        main = open("srctemplates/main.c", "r").read()
        f = open(prjdir + "/main.c", "w")
        f.write(main)
        f.close()
    # Files for selected plugins
    relevants_files = {}
    for plugin in project_info.info("OUTPUT"):
        module = loadPlugin(plugin)
        relevants_files[plugin] = module.createProject(project_info)
    project_info.setInfo("RELEVANT_FILES", relevants_files)
+
def loadPlugin(plugin):
    """
    Return the module implementing the given wizard plugin.

    The plugin is looked up as a submodule of the "plugins" package.
    """
    plugins_package = __import__("plugins", {}, {}, [plugin])
    return getattr(plugins_package, plugin)
+
def mkGenerator(project_info, makefile):
    """
    Generates the mk file for the current project.

    Fills the template placeholders ($pname, $cpuclockfreq,
    $cpuparameters, the source lists, $prefix/$suffix of the toolchain
    command and $main) and returns the resulting text.
    """
    mk_data = {}
    mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
    mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ")
    cpu_mk_parameters = []
    for key, value in project_info.info("CPU_INFOS").items():
        if key.startswith(const.MK_PARAM_ID):
            # The "MK" prefix of the parameter name is replaced by the project name
            cpu_mk_parameters.append("%s = %s" % (key.replace("MK", mk_data["$pname"]), value))
    mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters)
    mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info)
    # The toolchain path is split around "gcc": e.g. /usr/bin/arm-elf-gcc-4.3
    # yields prefix "/usr/bin/arm-elf-" and suffix "-4.3"
    mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[0])
    mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[1])
    mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
    for key in mk_data:
        # str.replace substitutes every occurrence in one pass; the old
        # while-find loop was redundant and could spin forever if a value
        # happened to contain its own placeholder.
        makefile = makefile.replace(key, mk_data[key])
    return makefile
+
def makefileGenerator(project_info, makefile):
    """
    Generate the Makefile for the current project.

    Replaces every occurrence of the $pname placeholder with the project
    directory name and returns the resulting text.
    """
    # TODO write a general function that works for both the mk file and the Makefile
    # str.replace already substitutes every occurrence, so no loop is
    # needed (the old while-find version could even loop forever if the
    # project name itself contained "$pname").
    return makefile.replace("$pname", os.path.basename(project_info.info("PROJECT_PATH")))
+
def csrcGenerator(project_info):
    """
    Build the source-list variables for the generated makefiles.

    Walks the enabled modules and their dependencies and collects their
    sources into the CSRC/PCSRC/CPPASRC/CXXSRC/ASRC makefile variables,
    plus the per-project constants.  Returns the 6-tuple of strings
    (csrc, pcsrc, cppasrc, cxxsrc, asrc, constants), each already
    formatted for direct insertion into the mk template.
    """
    modules = project_info.info("MODULES")
    files = project_info.info("FILES")
    # Harvard-architecture cpus (tagged "harvard") may route module sources
    # into program memory through the PCSRC variable
    if "harvard" in project_info.info("CPU_INFOS")["CPU_TAGS"]:
        harvard = True
    else:
        harvard = False
    # file to be included in CSRC variable
    csrc = []
    # file to be included in PCSRC variable
    pcsrc = []
    # files to be included in CPPASRC variable
    cppasrc = []
    # files to be included in CXXSRC variable
    cxxsrc = []
    # files to be included in ASRC variable
    asrc = []
    # constants to be included at the beginning of the makefile
    constants = {}
    for module, information in modules.items():
        module_files = set([])
        dependency_files = set([])
        # assembly sources
        asm_files = set([])
        hwdir = os.path.basename(project_info.info("PROJECT_PATH")) + "/hw"
        if information["enabled"]:
            if "constants" in information:
                constants.update(information["constants"])
            cfiles, sfiles = findModuleFiles(module, project_info)
            module_files |= set(cfiles)
            asm_files |= set(sfiles)
            # hw .c files live in the project's hw directory, not in the tree
            for file in information["hw"]:
                if file.endswith(".c"):
                    module_files |= set([hwdir + "/" + os.path.basename(file)])
            for file_dependency in information["depends"]:
                if file_dependency in files:
                    dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info)
                    dependency_files |= set(dependencyCFiles)
                    asm_files |= set(dependencySFiles)
            # On harvard cpus a module carrying a "harvard" key goes into
            # PCSRC; with the value "both" it is also compiled into CSRC
            for file in module_files:
                if not harvard or information.get("harvard", "both") == "both":
                    csrc.append(file)
                if harvard and "harvard" in information:
                    pcsrc.append(file)
            for file in dependency_files:
                csrc.append(file)
            # NOTE(review): the cpu-wide C_SRC/PC_SRC lists are appended once
            # per enabled module here, which looks like it can produce
            # duplicate entries in the generated makefile -- confirm intent
            for file in project_info.info("CPU_INFOS")["C_SRC"]:
                csrc.append(file)
            for file in project_info.info("CPU_INFOS")["PC_SRC"]:
                pcsrc.append(file)
            for file in asm_files:
                cppasrc.append(file)
    # cpu-wide assembly and C++ sources are appended once
    for file in project_info.info("CPU_INFOS")["CPPA_SRC"]:
        cppasrc.append(file)
    for file in project_info.info("CPU_INFOS")["CXX_SRC"]:
        cxxsrc.append(file)
    for file in project_info.info("CPU_INFOS")["ASRC"]:
        asrc.append(file)
    # Format each list as a backslash-continued makefile value
    csrc = " \\\n\t".join(csrc) + " \\"
    pcsrc = " \\\n\t".join(pcsrc) + " \\"
    cppasrc = " \\\n\t".join(cppasrc) + " \\"
    cxxsrc = " \\\n\t".join(cxxsrc) + " \\"
    asrc = " \\\n\t".join(asrc) + " \\"
    # NOTE(review): unicode() is Python 2 only; this file predates Python 3
    constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + unicode(value) for key, value in constants.items()])
    return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants
+
def findModuleFiles(module, project_info):
    """
    Find the source files belonging to the given module.

    Looks up the .c and .s/.S definitions named after the module, after
    its toolchain-specific variant and after each cpu-tag variant.
    Returns the tuple (cfiles, sfiles) with paths relative to
    SOURCES_PATH and normalized to unix separators.
    """
    cfiles = []
    sfiles = []
    cpu_infos = project_info.info("CPU_INFOS")
    toolchain = cpu_infos["TOOLCHAIN"]
    sources_root = project_info.info("SOURCES_PATH") + os.sep

    def normalize(path, filename):
        # Make the path relative to the sources tree and unix-styled.
        # replaceSeparators is a no-op when os.sep is already "/", so it is
        # safe to call unconditionally (the old code guarded only one of the
        # four call sites with an os.sep check -- inconsistently).
        path = replaceSeparators(path.replace(sources_root, ""))
        return path + "/" + filename

    # .c files related to the module and the cpu architecture
    for filename, path in findDefinitions(module + ".c", project_info) + \
            findDefinitions(module + "_" + toolchain + ".c", project_info):
        cfiles.append(normalize(path, filename))
    # .s files related to the module and the cpu architecture
    for filename, path in findDefinitions(module + ".s", project_info) + \
            findDefinitions(module + "_" + toolchain + ".s", project_info) + \
            findDefinitions(module + ".S", project_info) + \
            findDefinitions(module + "_" + toolchain + ".S", project_info):
        sfiles.append(normalize(path, filename))
    # .c and .s files related to the module and the cpu tags
    for tag in cpu_infos["CPU_TAGS"]:
        for filename, path in findDefinitions(module + "_" + tag + ".c", project_info):
            cfiles.append(normalize(path, filename))
        for filename, path in findDefinitions(module + "_" + tag + ".s", project_info) + \
                findDefinitions(module + "_" + tag + ".S", project_info):
            sfiles.append(normalize(path, filename))
    return cfiles, sfiles
+
def replaceSeparators(path):
    """
    Replace the separators in the given path with unix standard separator.

    On platforms where os.sep is already "/" the path is returned
    unchanged.
    """
    if os.sep != "/":
        # str.replace substitutes every occurrence in one pass; the old
        # while-find loop around it was redundant.
        path = path.replace(os.sep, "/")
    return path
+
def getSystemPath():
    """
    Return the entries of the PATH environment variable as a list.
    """
    # os.pathsep is ";" on Windows ("nt") and ":" elsewhere -- exactly what
    # the old per-platform branch hand-coded.
    return os.environ["PATH"].split(os.pathsep)
+
def findToolchains(path_list):
    """
    Scan the given directories for gcc toolchains.

    Returns the matching executables found through const.GCC_NAME, with
    duplicates removed (the order of the result is not significant).
    """
    toolchains = set()
    for directory in path_list:
        toolchains.update(glob.glob(directory + "/" + const.GCC_NAME))
    return list(toolchains)
+
def getToolchainInfo(output):
    """
    Parse the output of "gcc -v" and return a dict with the toolchain data.

    Recognized keys: "target", "version", "build", "configured" and
    "thread".  A key is present only when its pattern occurs exactly once
    in the output.
    """
    def unique_match(pattern, prefix):
        # Return the text following prefix when pattern matches exactly once
        found = re.findall(pattern, output)
        if len(found) == 1:
            return found[0].split(prefix)[1]
        return None

    info = {}
    target = unique_match(r"Target: .*", "Target: ")
    if target is not None:
        info["target"] = target
    version = unique_match(r"gcc version [0-9,.]*", "gcc version ")
    if version is not None:
        info["version"] = version
    build = unique_match(r"gcc version [0-9,.]* \(.*\)", "gcc version ")
    if build is not None:
        # The build name is the text between the parentheses
        info["build"] = build[build.find("(") + 1 : build.find(")")]
    configured = unique_match(r"Configured with: .*", "Configured with: ")
    if configured is not None:
        info["configured"] = configured
    thread = unique_match(r"Thread model: .*", "Thread model: ")
    if thread is not None:
        info["thread"] = thread
    return info
+
def getToolchainName(toolchain_info):
    """
    Build the display name of a toolchain from its info dictionary.

    Uses the "version" and "target" entries, stripping whitespace around
    the target.
    """
    return "GCC %s - %s" % (toolchain_info["version"], toolchain_info["target"].strip())
+
def loadSourceTree(project):
    """
    Index the source tree of the project.

    Stores the full os.walk listing of SOURCES_PATH on the project under
    the FILE_LIST key.
    """
    # list(...) replaces the redundant identity comprehension of the old code
    file_list = list(os.walk(project.info("SOURCES_PATH")))
    project.setInfo("FILE_LIST", file_list)