X-Git-Url: https://codewiz.org/gitweb?a=blobdiff_plain;f=wizard%2Fbertos_utils.py;h=a30aaed4cdb37f0a3f4c234d67c801ae4f587dfd;hb=bb9ed2f778c62e37fe8dab83fcb6dc9a6928619f;hp=567eeb7087975b3dd23eee8337c1a0faf8a34130;hpb=932cd676e0827be0266c7f665da684faceba0ecf;p=bertos.git

diff --git a/wizard/bertos_utils.py b/wizard/bertos_utils.py
index 567eeb70..a30aaed4 100644
--- a/wizard/bertos_utils.py
+++ b/wizard/bertos_utils.py
@@ -45,7 +45,10 @@ import pickle
 import const
 import plugins
 import DefineException
-import BProject
+
+from _wizard_version import WIZARD_VERSION
+
+from LoadException import VersionException, ToolchainException
 
 def isBertosDir(directory):
     return os.path.exists(directory + "/VERSION")
 
@@ -53,24 +56,53 @@ def isBertosDir(directory):
 def bertosVersion(directory):
     return open(directory + "/VERSION").readline().strip()
 
-def loadBertosProject(project_file):
-    project_data = pickle.loads(open(project_file, "r").read())
-    project_info = BProject.BProject()
-    project_info.setInfo("PROJECT_PATH", os.path.dirname(project_file))
-    project_info.setInfo("SOURCES_PATH", project_data["SOURCES_PATH"])
-    loadSourceTree(project_info)
-    cpu_name = project_data["CPU_NAME"]
-    project_info.setInfo("CPU_NAME", cpu_name)
-    cpu_info = loadCpuInfos(project_info)
-    for cpu in cpu_info:
-        print cpu["CPU_NAME"], cpu_name
-        if cpu["CPU_NAME"] == cpu_name:
-            print "sono uguali"
-            project_info.setInfo("CPU_INFOS", cpu)
-            break
-    loadModuleData(project_info)
-    print project_info
+def setEnabledModules(project_info, enabled_modules):
+    modules = project_info.info("MODULES")
+    files = {}
+    for module, information in modules.items():
+        information["enabled"] = module in enabled_modules
+        if information["enabled"]:
+            for dependency in information["depends"]:
+                if not dependency in modules:
+                    files[dependency] = files.get(dependency, 0) + 1
+    project_info.setInfo("MODULES", modules)
+    project_info.setInfo("FILES", files)
+
+def enabledModules(project_info):
+    enabled_modules = []
+    for name, module in project_info.info("MODULES").items():
+        if module["enabled"]:
+            enabled_modules.append(name)
+    return enabled_modules
 
+def presetList(directory):
+    """
+    Return the list of presets found in the selected BeRTOS version.
+    """
+    def getPresetInfo(preset_dir):
+        # Find and return information about the preset.
+        # Keys needed for BBoardPage:
+        # - "name":
+        # - "description":
+
+        # NOTE: this is only a test stub.
+        preset_info = pickle.loads(open(os.path.join(preset_dir, 'info'), "r").read())
+        return preset_info
+    abspath = os.path.join(directory, const.PREDEFINED_BOARDS_DIR)
+    preset_list = dict([
+        (os.path.join(abspath, preset_dir), getPresetInfo(os.path.join(abspath, preset_dir)))
+        for preset_dir in os.listdir(os.path.join(directory, const.PREDEFINED_BOARDS_DIR))
+    ])
+    return preset_list
+
+def mergeSources(srcdir, new_sources, old_sources):
+    # The current mergeSources function provides only a raw copy of the sources in the
+    # created project.
+    #
+    # TODO: implement the three way merge algorithm
+    #
+    shutil.rmtree(srcdir, True)
+    copytree.copytree(os.path.join(new_sources, "bertos"), srcdir, ignore_list=const.IGNORE_LIST)
 
 def projectFileGenerator(project_info):
     directory = project_info.info("PROJECT_PATH")
@@ -80,58 +112,81 @@ def projectFileGenerator(project_info):
         if information["enabled"]:
             enabled_modules.append(module)
     project_data["ENABLED_MODULES"] = enabled_modules
-    project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
+    # Use the local BeRTOS version instead of the original one
+    # project_data["SOURCES_PATH"] = project_info.info("SOURCES_PATH")
+    project_data["SOURCES_PATH"] = directory
+    project_data["PROJECT_NAME"] = project_info.info("PROJECT_NAME", os.path.basename(directory))
     project_data["TOOLCHAIN"] = project_info.info("TOOLCHAIN")
     project_data["CPU_NAME"] = project_info.info("CPU_NAME")
-    print project_info.info("CPU_NAME")
     project_data["SELECTED_FREQ"] = project_info.info("SELECTED_FREQ")
+    project_data["OUTPUT"] = project_info.info("OUTPUT")
+    project_data["WIZARD_VERSION"] = WIZARD_VERSION
     return pickle.dumps(project_data)
 
-def createBertosProject(project_info):
+def createBertosProject(project_info, edit=False):
     directory = project_info.info("PROJECT_PATH")
     sources_dir = project_info.info("SOURCES_PATH")
-    if os.path.isdir(directory):
-        shutil.rmtree(directory, True)
-    os.makedirs(directory)
+    old_sources_dir = project_info.info("OLD_SOURCES_PATH")
+    if not edit:
+        if os.path.isdir(directory):
+            shutil.rmtree(directory, True)
+        os.makedirs(directory)
+    # Write the project file
     f = open(directory + "/project.bertos", "w")
     f.write(projectFileGenerator(project_info))
     f.close()
+    # VERSION file
+    version_file = open(os.path.join(const.DATA_DIR, "vtemplates/VERSION"), "r").read()
+    open(directory + "/VERSION", "w").write(versionFileGenerator(project_info, version_file))
     # Destination source dir
     srcdir = directory + "/bertos"
-    shutil.rmtree(srcdir, True)
-    copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
+    if not edit:
+        # If not in editing mode, copy all the bertos sources into the /bertos subdirectory of the project
+        shutil.rmtree(srcdir, True)
+        copytree.copytree(sources_dir + "/bertos", srcdir, ignore_list=const.IGNORE_LIST)
+    elif old_sources_dir:
+        # If in editing mode, merge the current bertos sources with the selected ones
+        # TODO: implement the three way merge algorithm
+        #
+        mergeSources(srcdir, sources_dir, old_sources_dir)
    # Destination makefile
     makefile = directory + "/Makefile"
-    if os.path.exists(makefile):
-        os.remove(makefile)
-    makefile = open("mktemplates/Makefile").read()
+    makefile = open(os.path.join(const.DATA_DIR, "mktemplates/Makefile"), 'r').read()
     makefile = makefileGenerator(project_info, makefile)
     open(directory + "/Makefile", "w").write(makefile)
     # Destination project dir
-    prjdir = directory + "/" + os.path.basename(directory)
-    shutil.rmtree(prjdir, True)
-    os.mkdir(prjdir)
+    # prjdir = directory + "/" + os.path.basename(directory)
+    prjdir = os.path.join(directory, project_info.info("PROJECT_NAME"))
+    if not edit:
+        shutil.rmtree(prjdir, True)
+        os.mkdir(prjdir)
     # Destination hw files
     hwdir = prjdir + "/hw"
-    shutil.rmtree(hwdir, True)
-    os.mkdir(hwdir)
+    if not edit:
+        shutil.rmtree(hwdir, True)
+        os.mkdir(hwdir)
     # Copy all the hw files
     for module, information in project_info.info("MODULES").items():
         for hwfile in information["hw"]:
             string = open(sources_dir + "/" + hwfile, "r").read()
-            open(hwdir + "/" + os.path.basename(hwfile), "w").write(string)
+            hwfile_path = hwdir + "/" + os.path.basename(hwfile)
+            if not edit or not os.path.exists(hwfile_path):
+                # If not in editing mode, copy all the hw files. If in
+                # editing mode, copy only the files that don't exist yet.
+                open(hwdir + "/" + os.path.basename(hwfile), "w").write(string)
     # Destination configurations files
     cfgdir = prjdir + "/cfg"
-    shutil.rmtree(cfgdir, True)
-    os.mkdir(cfgdir)
-    # Set to 1 the autoenabled for enabled modules
+    if not edit:
+        shutil.rmtree(cfgdir, True)
+        os.mkdir(cfgdir)
+    # Set the autoenabled parameters properly
     for module, information in project_info.info("MODULES").items():
-        if information["enabled"] and "configuration" in information and information["configuration"] != "":
+        if "configuration" in information and information["configuration"] != "":
            configurations = project_info.info("CONFIGURATIONS")
            configuration = configurations[information["configuration"]]
            for start, parameter in configuration["paramlist"]:
                if "type" in configuration[parameter]["informations"] and configuration[parameter]["informations"]["type"] == "autoenabled":
-                    configuration[parameter]["value"] = "1"
+                    configuration[parameter]["value"] = "1" if information["enabled"] else "0"
            project_info.setInfo("CONFIGURATIONS", configurations)
     # Copy all the configuration files
     for configuration, information in project_info.info("CONFIGURATIONS").items():
@@ -147,13 +202,20 @@ def createBertosProject(project_info):
         f = open(cfgdir + "/" + os.path.basename(configuration), "w")
         f.write(string)
         f.close()
-    # Destinatio mk file
-    makefile = open("mktemplates/template.mk", "r").read()
+    if not edit:
+        # Destination user mk file (only on project creation)
+        makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template.mk"), "r").read()
+        # Deadly performance loss was here :(
+        makefile = userMkGenerator(project_info, makefile)
+        open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
+    # Destination wizard mk file
+    makefile = open(os.path.join(const.DATA_DIR, "mktemplates/template_wiz.mk"), "r").read()
     makefile = mkGenerator(project_info, makefile)
-    open(prjdir + "/" + os.path.basename(prjdir) + ".mk", "w").write(makefile)
+    open(prjdir + "/" + os.path.basename(prjdir) + "_wiz.mk", "w").write(makefile)
     # Destination main.c file
-    main = open("srctemplates/main.c", "r").read()
-    open(prjdir + "/main.c", "w").write(main)
+    if not edit:
+        main = open(os.path.join(const.DATA_DIR, "srctemplates/main.c"), "r").read()
+        open(prjdir + "/main.c", "w").write(main)
     # Files for selected plugins
     relevants_files = {}
     for plugin in project_info.info("OUTPUT"):
@@ -166,23 +228,32 @@ def loadPlugin(plugin):
     Returns the given plugin module.
     """
     return getattr(__import__("plugins", {}, {}, [plugin]), plugin)
-    
+
+def versionFileGenerator(project_info, version_file):
+    version = bertosVersion(project_info.info("SOURCES_PATH"))
+    return version_file.replace('$version', version)
+
+def userMkGenerator(project_info, makefile):
+    mk_data = {}
+    mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH"))
+    mk_data["$main"] = os.path.basename(project_info.info("PROJECT_PATH")) + "/main.c"
+    for key in mk_data:
+        while makefile.find(key) != -1:
+            makefile = makefile.replace(key, mk_data[key])
+    return makefile
+
 def mkGenerator(project_info, makefile):
     """
     Generates the mk file for the current project.
""" mk_data = {} mk_data["$pname"] = os.path.basename(project_info.info("PROJECT_PATH")) - mk_data["$cpuflag"] = project_info.info("CPU_INFOS")["CPU_FLAG_NAME"] - mk_data["$cpuname"] = project_info.info("CPU_INFOS")["CORE_CPU"] mk_data["$cpuclockfreq"] = project_info.info("SELECTED_FREQ") - mk_data["$cflags"] = " ".join(project_info.info("CPU_INFOS")["C_FLAGS"]) - mk_data["$ldflags"] = " ".join(project_info.info("CPU_INFOS")["LD_FLAGS"]) - mk_data["$cppflags"] = " ".join(project_info.info("CPU_INFOS")["CPP_FLAGS"]) - mk_data["$cppaflags"] = " ".join(project_info.info("CPU_INFOS")["CPPA_FLAGS"]) - mk_data["$cxxflags"] = " ".join(project_info.info("CPU_INFOS")["CXX_FLAGS"]) - mk_data["$asflags"] = " ".join(project_info.info("CPU_INFOS")["AS_FLAGS"]) - mk_data["$arflags"] = " ".join(project_info.info("CPU_INFOS")["AR_FLAGS"]) + cpu_mk_parameters = [] + for key, value in project_info.info("CPU_INFOS").items(): + if key.startswith(const.MK_PARAM_ID): + cpu_mk_parameters.append("%s = %s" %(key.replace("MK", mk_data["$pname"]), value)) + mk_data["$cpuparameters"] = "\n".join(cpu_mk_parameters) mk_data["$csrc"], mk_data["$pcsrc"], mk_data["$cppasrc"], mk_data["$cxxsrc"], mk_data["$asrc"], mk_data["$constants"] = csrcGenerator(project_info) mk_data["$prefix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[0]) mk_data["$suffix"] = replaceSeparators(project_info.info("TOOLCHAIN")["path"].split("gcc")[1]) @@ -197,8 +268,8 @@ def makefileGenerator(project_info, makefile): Generate the Makefile for the current project. """ # TODO write a general function that works for both the mk file and the Makefile - while makefile.find("project_name") != -1: - makefile = makefile.replace("project_name", os.path.basename(project_info.info("PROJECT_PATH"))) + while makefile.find("$pname") != -1: + makefile = makefile.replace("$pname", os.path.basename(project_info.info("PROJECT_PATH"))) return makefile def csrcGenerator(project_info): @@ -235,8 +306,7 @@ def csrcGenerator(project_info): for file in information["hw"]: if file.endswith(".c"): module_files |= set([hwdir + "/" + os.path.basename(file)]) - for file_dependency in information["depends"]: - if file_dependency in files: + for file_dependency in information["depends"] + tuple(files.keys()): dependencyCFiles, dependencySFiles = findModuleFiles(file_dependency, project_info) dependency_files |= set(dependencyCFiles) asm_files |= set(dependencySFiles) @@ -259,10 +329,15 @@ def csrcGenerator(project_info): cxxsrc.append(file) for file in project_info.info("CPU_INFOS")["ASRC"]: asrc.append(file) + csrc = set(csrc) csrc = " \\\n\t".join(csrc) + " \\" + pcsrc = set(pcsrc) pcsrc = " \\\n\t".join(pcsrc) + " \\" + cppasrc = set(cppasrc) cppasrc = " \\\n\t".join(cppasrc) + " \\" + cxxsrc = set(cxxsrc) cxxsrc = " \\\n\t".join(cxxsrc) + " \\" + asrc = set(asrc) asrc = " \\\n\t".join(asrc) + " \\" constants = "\n".join([os.path.basename(project_info.info("PROJECT_PATH")) + "_" + key + " = " + unicode(value) for key, value in constants.items()]) return csrc, pcsrc, cppasrc, cxxsrc, asrc, constants @@ -272,28 +347,28 @@ def findModuleFiles(module, project_info): cfiles = [] sfiles = [] # .c files related to the module and the cpu architecture - for filename, path in findDefinitions(module + ".c", project_info) + \ - findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".c", project_info): + for filename, path in project_info.searchFiles(module + ".c") + \ + project_info.searchFiles(module + "_" + 
         path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
         path = replaceSeparators(path)
         cfiles.append(path + "/" + filename)
     # .s files related to the module and the cpu architecture
-    for filename, path in findDefinitions(module + ".s", project_info) + \
-        findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s", project_info) + \
-        findDefinitions(module + ".S", project_info) + \
-        findDefinitions(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S", project_info):
+    for filename, path in project_info.searchFiles(module + ".s") + \
+        project_info.searchFiles(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".s") + \
+        project_info.searchFiles(module + ".S") + \
+        project_info.searchFiles(module + "_" + project_info.info("CPU_INFOS")["TOOLCHAIN"] + ".S"):
         path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
         path = replaceSeparators(path)
         sfiles.append(path + "/" + filename)
     # .c and .s files related to the module and the cpu tags
     for tag in project_info.info("CPU_INFOS")["CPU_TAGS"]:
-        for filename, path in findDefinitions(module + "_" + tag + ".c", project_info):
+        for filename, path in project_info.searchFiles(module + "_" + tag + ".c"):
            path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
            if os.sep != "/":
                path = replaceSeparators(path)
            cfiles.append(path + "/" + filename)
-        for filename, path in findDefinitions(module + "_" + tag + ".s", project_info) + \
-            findDefinitions(module + "_" + tag + ".S", project_info):
+        for filename, path in project_info.searchFiles(module + "_" + tag + ".s") + \
+            project_info.searchFiles(module + "_" + tag + ".S"):
            path = path.replace(project_info.info("SOURCES_PATH") + os.sep, "")
            path = replaceSeparators(path)
            sfiles.append(path + "/" + filename)
@@ -349,31 +424,15 @@ def getToolchainInfo(output):
         info["thread"] = thread[0].split("Thread model: ")[1]
     return info
 
-def loadSourceTree(project):
-    fileList = [f for f in os.walk(project.info("SOURCES_PATH"))]
-    project.setInfo("FILE_LIST", fileList)
-
-def findDefinitions(ftype, project):
-    L = project.info("FILE_LIST")
-    definitions = []
-    for element in L:
-        for filename in element[2]:
-            if fnmatch.fnmatch(filename, ftype):
-                definitions.append((filename, element[0]))
-    return definitions
-
-def loadCpuInfos(project):
-    cpuInfos = []
-    for definition in findDefinitions(const.CPU_DEFINITION, project):
-        cpuInfos.append(getInfos(definition))
-    return cpuInfos
+def getToolchainName(toolchain_info):
+    name = "GCC " + toolchain_info["version"] + " - " + toolchain_info["target"].strip()
+    return name
 
 def getTagSet(cpu_info):
     tag_set = set([])
     for cpu in cpu_info:
         tag_set |= set([cpu["CPU_NAME"]])
         tag_set |= set(cpu["CPU_TAGS"])
-        tag_set |= set([cpu["CORE_CPU"]])
         tag_set |= set([cpu["TOOLCHAIN"]])
     return tag_set
 
@@ -431,8 +490,7 @@ def loadModuleDefinition(first_comment):
                 del module_definition["module_description"]
             if const.MODULE_DEFINITION["module_harvard"] in module_definition:
                 harvard = module_definition[const.MODULE_DEFINITION["module_harvard"]]
-                if harvard == "both" or harvard == "pgm_memory":
-                    module_dict[module_name]["harvard"] = harvard
+                module_dict[module_name]["harvard"] = harvard
                 del module_definition[const.MODULE_DEFINITION["module_harvard"]]
             if const.MODULE_DEFINITION["module_hw"] in module_definition:
                 hw = module_definition[const.MODULE_DEFINITION["module_hw"]]
@@ -533,51 +591,6 @@ def getDefinitionBlocks(text):
             block.append(([comment], define, start))
     return block
 
-def loadModuleData(project):
-    module_info_dict = {}
-    list_info_dict = {}
-    configuration_info_dict = {}
-    file_dict = {}
-    for filename, path in findDefinitions("*.h", project) + findDefinitions("*.c", project) + findDefinitions("*.s", project) + findDefinitions("*.S", project):
-        comment_list = getCommentList(open(path + "/" + filename, "r").read())
-        if len(comment_list) > 0:
-            module_info = {}
-            configuration_info = {}
-            try:
-                to_be_parsed, module_dict = loadModuleDefinition(comment_list[0])
-            except ParseError, err:
-                raise DefineException.ModuleDefineException(path, err.line_number, err.line)
-            for module, information in module_dict.items():
-                if "depends" not in information:
-                    information["depends"] = ()
-                information["depends"] += (filename.split(".")[0],)
-                information["category"] = os.path.basename(path)
-                if "configuration" in information and len(information["configuration"]):
-                    configuration = module_dict[module]["configuration"]
-                    try:
-                        configuration_info[configuration] = loadConfigurationInfos(project.info("SOURCES_PATH") + "/" + configuration)
-                    except ParseError, err:
-                        raise DefineException.ConfigurationDefineException(project.info("SOURCES_PATH") + "/" + configuration, err.line_number, err.line)
-            module_info_dict.update(module_dict)
-            configuration_info_dict.update(configuration_info)
-            if to_be_parsed:
-                try:
-                    list_dict = loadDefineLists(comment_list[1:])
-                    list_info_dict.update(list_dict)
-                except ParseError, err:
-                    raise DefineException.EnumDefineException(path, err.line_number, err.line)
-    for filename, path in findDefinitions("*_" + project.info("CPU_INFOS")["TOOLCHAIN"] + ".h", project):
-        comment_list = getCommentList(open(path + "/" + filename, "r").read())
-        list_info_dict.update(loadDefineLists(comment_list))
-    for tag in project.info("CPU_INFOS")["CPU_TAGS"]:
-        for filename, path in findDefinitions("*_" + tag + ".h", project):
-            comment_list = getCommentList(open(path + "/" + filename, "r").read())
-            list_info_dict.update(loadDefineLists(comment_list))
-    project.setInfo("MODULES", module_info_dict)
-    project.setInfo("LISTS", list_info_dict)
-    project.setInfo("CONFIGURATIONS", configuration_info_dict)
-    project.setInfo("FILES", file_dict)
-
 def formatParamNameValue(text):
     """
     Take the given string and return a tuple with the name of the parameter in the first position
@@ -599,6 +612,7 @@ def loadConfigurationInfos(path):
             "long": boolean indicating if the num is a long
             "unsigned": boolean indicating if the num is an unsigned
             "value_list": the name of the enum for enum parameters
+            "conditional_deps": the list of conditional dependencies for boolean parameters
     """
     configuration_infos = {}
     configuration_infos["paramlist"] = []
@@ -621,10 +635,24 @@
                 configuration_infos[name]["value"].find("U") != -1):
                 configuration_infos[name]["informations"]["unsigned"] = True
                 configuration_infos[name]["value"] = configuration_infos[name]["value"].replace("U", "")
+            if "conditional_deps" in configuration_infos[name]["informations"]:
+                if (type(configuration_infos[name]["informations"]["conditional_deps"]) == str or
+                    type(configuration_infos[name]["informations"]["conditional_deps"]) == unicode):
+                    configuration_infos[name]["informations"]["conditional_deps"] = (configuration_infos[name]["informations"]["conditional_deps"], )
+                elif type(configuration_infos[name]["informations"]["conditional_deps"]) == tuple:
+                    pass
+                else:
+                    configuration_infos[name]["informations"]["conditional_deps"] = ()
            configuration_infos[name]["description"] = description
            configuration_infos[name]["brief"] = brief
     return configuration_infos
 
+def updateConfigurationValues(def_conf, user_conf):
+    for param in def_conf["paramlist"]:
+        if param[1] in user_conf and "value" in user_conf[param[1]]:
+            def_conf[param[1]]["value"] = user_conf[param[1]]["value"]
+    return def_conf
+
 def findParameterType(parameter):
     if "value_list" in parameter["informations"]:
         return "enum"