From mboxrd@z Thu Jan 1 00:00:00 1970
From: "Bob Feng" <bob.c.feng@intel.com>
To: devel@edk2.groups.io
Cc: Liming Gao, Bob Feng
Subject: [Patch 11/11] BaseTools: Enhance Multiple-Process AutoGen
Date: Mon, 29 Jul 2019 16:44:56 +0800
Message-Id: <20190729084456.18844-12-bob.c.feng@intel.com>
X-Mailer: git-send-email 2.20.1.windows.1
In-Reply-To: <20190729084456.18844-1-bob.c.feng@intel.com>
References: <20190729084456.18844-1-bob.c.feng@intel.com>
MIME-Version: 1.0
Content-Transfer-Encoding: 8bit

BZ: https://bugzilla.tianocore.org/show_bug.cgi?id=1875

1. Set the log queue's maxsize to thread number * 10.
2. Enhance the ModuleUniqueBaseName function.
3. Fix bugs with build-option PCDs in the AutoGen sub-processes.
4. Enhance error handling.
5. Fix a bug in the duplicate-module handling.

Cc: Liming Gao
Signed-off-by: Bob Feng <bob.c.feng@intel.com>
---
 .../Source/Python/AutoGen/AutoGenWorker.py    | 55 +++++++++--
 BaseTools/Source/Python/AutoGen/DataPipe.py   | 11 ++-
 .../Python/AutoGen/ModuleAutoGenHelper.py     |  9 +-
 .../Source/Python/AutoGen/PlatformAutoGen.py  | 49 +++++++---
 .../Source/Python/AutoGen/WorkspaceAutoGen.py |  2 +
 BaseTools/Source/Python/build/build.py        | 95 ++++++++++---------
 6 files changed, 148 insertions(+), 73 deletions(-)
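Note (this sits below the "---" cut line, so it is not applied as part of the commit): the series replaces the old "terminate the worker processes" error path with a feedback-queue handshake — every worker always reports "Done" on the feedback queue when it exits, the manager thread counts those reports, and a shared multiprocessing Event asks the remaining workers to stop as soon as any of them fails. The fragment below is a minimal, standalone sketch of that pattern, not BaseTools code; demo_worker and DemoManager are hypothetical names and only the Python 3 standard library is assumed.

# Minimal sketch of the feedback-queue / error-event pattern this patch
# moves to.  demo_worker and DemoManager are illustrative names only.
import multiprocessing as mp
import threading
from queue import Empty

def demo_worker(task_q, feedback_q, error_event):
    try:
        while not task_q.empty():           # empty() is approximate, so guard get_nowait()
            if error_event.is_set():        # another worker already failed; stop early
                break
            try:
                task = task_q.get_nowait()
            except Empty:
                break
            if task == "bad":               # simulate a failing AutoGen task
                feedback_q.put("error: %s" % task)
                return
    finally:
        feedback_q.put("Done")              # always tell the manager this worker exited

class DemoManager(threading.Thread):
    def __init__(self, worker_count, feedback_q, error_event):
        super(DemoManager, self).__init__()
        self.worker_count = worker_count
        self.feedback_q = feedback_q
        self.error_event = error_event
        self.Status = True

    def run(self):
        done = 0
        while done < self.worker_count:
            msg = self.feedback_q.get()
            if msg == "Done":
                done += 1                   # one worker finished (cleanly or not)
            else:
                self.Status = False         # remember the failure ...
                self.error_event.set()      # ... and ask the other workers to stop

if __name__ == '__main__':
    task_q, feedback_q, error_event = mp.Queue(), mp.Queue(), mp.Event()
    for t in ("a", "b", "bad", "c"):
        task_q.put(t)
    workers = [mp.Process(target=demo_worker, args=(task_q, feedback_q, error_event))
               for _ in range(2)]
    manager = DemoManager(len(workers), feedback_q, error_event)
    manager.start()
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    manager.join()
    print("AutoGen status:", manager.Status)

In the patch itself the same roles are played by the finally clause in AutoGenWorkerInProcess.run(), the fin_num counter in AutoGenManager.run(), and the error_event created in Build.StartAutoGen().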
diff --git a/BaseTools/Source/Python/AutoGen/AutoGenWorker.py b/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
index d1c55cffa8d0..de6a17396e12 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
@@ -14,20 +14,24 @@
 import Common.GlobalData as GlobalData
 import Common.EdkLogger as EdkLogger
 import os
 from Common.MultipleWorkspace import MultipleWorkspace as mws
 from AutoGen.AutoGen import AutoGen
 from Workspace.WorkspaceDatabase import BuildDB
-import time
+
 try:
     from queue import Empty
 except:
     from Queue import Empty
 import traceback
 import sys
 from AutoGen.DataPipe import MemoryDataPipe
 import logging
+def clearQ(q):
+    while not q.empty():
+        q.get_nowait()
+
 class LogAgent(threading.Thread):
     def __init__(self,log_q,log_level,log_file=None):
         super(LogAgent,self).__init__()
         self.log_q = log_q
         self.log_level = log_level
@@ -88,45 +92,58 @@ class LogAgent(threading.Thread):
         self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
     def kill(self):
         self.log_q.put(None)
 class AutoGenManager(threading.Thread):
-    def __init__(self,autogen_workers, feedback_q):
+    def __init__(self,autogen_workers, feedback_q,error_event):
         super(AutoGenManager,self).__init__()
         self.autogen_workers = autogen_workers
         self.feedback_q = feedback_q
         self.Status = True
+        self.error_event = error_event
     def run(self):
         try:
+            fin_num = 0
             while True:
                 badnews = self.feedback_q.get()
                 if badnews is None:
+                    break
+                if badnews == "Done":
+                    fin_num += 1
+                else:
                     self.Status = False
                     self.TerminateWorkers()
+                if fin_num == len(self.autogen_workers):
+                    self.clearQueue()
+                    for w in self.autogen_workers:
+                        w.join()
                     break
         except Exception:
             return
-    def kill(self):
-        self.feedback_q.put(None)
-
+    def clearQueue(self):
+        taskq = self.autogen_workers[0].module_queue
+        logq = self.autogen_workers[0].log_q
+        clearQ(taskq)
+        clearQ(self.feedback_q)
+        clearQ(logq)
     def TerminateWorkers(self):
-        for w in self.autogen_workers:
-            if w.is_alive():
-                w.terminate()
-
+        self.error_event.set()
+    def kill(self):
+        self.feedback_q.put(None)
 class AutoGenWorkerInProcess(mp.Process):
-    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock, share_data,log_q):
+    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock, share_data,log_q,error_event):
         mp.Process.__init__(self)
         self.module_queue = module_queue
         self.data_pipe_file_path =data_pipe_file_path
         self.data_pipe = None
         self.feedback_q = feedback_q
         self.PlatformMetaFileSet = {}
         self.file_lock = file_lock
         self.share_data = share_data
         self.log_q = log_q
+        self.error_event = error_event
     def GetPlatformMetaFile(self,filepath,root):
         try:
             return self.PlatformMetaFileSet[(filepath,root)]
         except:
             self.PlatformMetaFileSet[(filepath,root)] = filepath
@@ -161,17 +178,28 @@ class AutoGenWorkerInProcess(mp.Process):
             os.environ._data = self.data_pipe.Get("Env_Var")
             GlobalData.gWorkspace = workspacedir
             GlobalData.gDisableIncludePathCheck = False
             GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
             GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
+            pcd_from_build_option = []
+            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
+                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
+                if pcd_tuple[2].strip():
+                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
+                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
+            GlobalData.BuildOptionPcd = pcd_from_build_option
             module_count = 0
             FfsCmd = self.data_pipe.Get("FfsCommand")
             if FfsCmd is None:
                 FfsCmd = {}
             PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                              self.data_pipe.Get("P_Info").get("WorkspaceDir"))
+            libConstPcd = self.data_pipe.Get("LibConstPcd")
+            Refes = self.data_pipe.Get("REFS")
             while not self.module_queue.empty():
+                if self.error_event.is_set():
+                    break
                 module_count += 1
                 module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get()
                 modulefullpath = os.path.join(module_root,module_file)
                 taskname = " : ".join((modulefullpath,module_arch))
                 module_metafile = PathClass(module_file,module_root)
@@ -184,18 +212,25 @@ class AutoGenWorkerInProcess(mp.Process):
                 arch = module_arch
                 target = self.data_pipe.Get("P_Info").get("Target")
                 toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                 Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                 Ma.IsLibrary = IsLib
+                if IsLib:
+                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in libConstPcd:
+                        Ma.ConstPcd = libConstPcd[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
+                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in Refes:
+                        Ma.ReferenceModules = Refes[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
                 Ma.CreateCodeFile()
                 Ma.CreateMakeFile(GenFfsList=FfsCmd.get((Ma.MetaFile.File, Ma.Arch),[]))
                 Ma.CreateAsBuiltInf()
         except Empty:
             pass
         except:
             traceback.print_exc(file=sys.stdout)
             self.feedback_q.put(taskname)
+        finally:
+            self.feedback_q.put("Done")

     def printStatus(self):
         print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
         print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
         groupobj = {}
diff --git a/BaseTools/Source/Python/AutoGen/DataPipe.py b/BaseTools/Source/Python/AutoGen/DataPipe.py
index 33d2b14c9add..2052084bdb4b 100644
--- a/BaseTools/Source/Python/AutoGen/DataPipe.py
+++ b/BaseTools/Source/Python/AutoGen/DataPipe.py
@@ -72,11 +72,11 @@ class MemoryDataPipe(DataPipe):
         #Platform Module Pcds
         ModulePcds = {}
         for m in PlatformInfo.Platform.Modules:
             m_pcds = PlatformInfo.Platform.Modules[m].Pcds
             if m_pcds:
-                ModulePcds[(m.File,m.Root)] = [PCD_DATA(
+                ModulePcds[(m.File,m.Root,m.Arch)] = [PCD_DATA(
                 pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
                 pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
                 pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
                 pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
                 for pcd in PlatformInfo.Platform.Modules[m].Pcds.values()]
@@ -84,15 +84,22 @@ class MemoryDataPipe(DataPipe):
         self.DataContainer = {"MOL_PCDS":ModulePcds}

         #Module's Library Instance
         ModuleLibs = {}
+        libModules = {}
         for m in PlatformInfo.Platform.Modules:
             module_obj = BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain]
             Libs = GetModuleLibInstances(module_obj, PlatformInfo.Platform, BuildDB.BuildObject, PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain)
-            ModuleLibs[(m.File,m.Root,module_obj.Arch)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch) for l in Libs]
+            for lib in Libs:
+                try:
+                    libModules[(lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)].append((m.File,m.Root,module_obj.Arch,m.Path))
+                except:
+                    libModules[(lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)] = [(m.File,m.Root,module_obj.Arch,m.Path)]
+            ModuleLibs[(m.File,m.Root,module_obj.Arch,m.Path)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch,l.MetaFile.Path) for l in Libs]
         self.DataContainer = {"DEPS":ModuleLibs}
+        self.DataContainer = {"REFS":libModules}

         #Platform BuildOptions
         platform_build_opt = PlatformInfo.EdkIIBuildOption
diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py b/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
index 5186ca1da3e3..c7591253debd 100644
--- a/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
+++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
@@ -595,14 +595,17 @@ class PlatformInfo(AutoGenInfo):
     def ApplyLibraryInstance(self,module):
         alldeps = self.DataPipe.Get("DEPS")
         if alldeps is None:
             alldeps = {}
-        mod_libs = alldeps.get((module.MetaFile.File,module.MetaFile.Root,module.Arch),[])
+        mod_libs = alldeps.get((module.MetaFile.File,module.MetaFile.Root,module.Arch,module.MetaFile.Path),[])
         retVal = []
-        for (file_path,root,arch) in mod_libs:
-            retVal.append(self.Wa.BuildDatabase[PathClass(file_path,root), arch, self.Target,self.ToolChain])
+        for (file_path,root,arch,abs_path) in mod_libs:
+            libMetaFile = PathClass(file_path,root)
+            libMetaFile.OriginalPath = PathClass(file_path,root)
+            libMetaFile.Path = abs_path
+            retVal.append(self.Wa.BuildDatabase[libMetaFile, arch, self.Target,self.ToolChain])
         return retVal

     ## Parse build_rule.txt in Conf Directory.
     #
     # @retval BuildRule object
diff --git a/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py b/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
index 9885c6a3a3bf..2a614e6a7134 100644
--- a/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
@@ -131,10 +131,16 @@ class PlatformAutoGen(AutoGen):
         self.DataPipe = MemoryDataPipe(self.BuildDir)
         self.DataPipe.FillData(self)

         return True
+    def FillData_LibConstPcd(self):
+        libConstPcd = {}
+        for LibAuto in self.LibraryAutoGenList:
+            if LibAuto.ConstPcd:
+                libConstPcd[(LibAuto.MetaFile.File,LibAuto.MetaFile.Root,LibAuto.Arch,LibAuto.MetaFile.Path)] = LibAuto.ConstPcd
+        self.DataPipe.DataContainer = {"LibConstPcd":libConstPcd}
     ## hash() operator of PlatformAutoGen
     #
     # The platform file path and arch string will be used to represent
     # hash value of this object
     #
@@ -1080,11 +1086,14 @@ class PlatformAutoGen(AutoGen):
     @cached_property
     def GetAllModuleInfo(self,WithoutPcd=True):
         ModuleLibs = set()
         for m in self.Platform.Modules:
             module_obj = self.BuildDatabase[m,self.Arch,self.BuildTarget,self.ToolChain]
-            Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain)
+            if not bool(module_obj.LibraryClass):
+                Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain)
+            else:
+                Libs = []
             ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.MetaFile.Path,l.MetaFile.BaseName,l.MetaFile.OriginalPath,l.Arch,True) for l in Libs]))
             if WithoutPcd and module_obj.PcdIsDriver:
                 continue
             ModuleLibs.add((m.File,m.Root,m.Path,m.BaseName,m.OriginalPath,module_obj.Arch,bool(module_obj.LibraryClass)))
@@ -1335,29 +1344,39 @@ class PlatformAutoGen(AutoGen):
         else:
             PlatformModuleOptions = {}

         return ModuleTypeOptions,PlatformModuleOptions
+    def ModuleGuid(self,Module):
+        if os.path.basename(Module.MetaFile.File) != os.path.basename(Module.MetaFile.Path):
+            #
+            # Length of GUID is 36
+            #
+            return os.path.basename(Module.MetaFile.Path)[:36]
+        return Module.Guid
     @cached_property
     def UniqueBaseName(self):
         retVal ={}
-        name_path_map = {}
+        ModuleNameDict = {}
+        UniqueName = {}
         for Module in self._MbList:
-            name_path_map[Module.BaseName] = set()
-        for Module in self._MbList:
-            name_path_map[Module.BaseName].add(Module.MetaFile)
-        for name in name_path_map:
-            if len(name_path_map[name]) > 1:
-                guidset = set()
-                for metafile in name_path_map[name]:
-                    m = self.BuildDatabase[metafile, self.Arch, self.BuildTarget, self.ToolChain]
-                    retVal[name] = '%s_%s' % (name, m.Guid)
-                    guidset.add(m.Guid)
-                samemodules = list(name_path_map[name])
-                if len(guidset) > 1:
-                    EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
+            unique_base_name = '%s_%s' % (Module.BaseName,self.ModuleGuid(Module))
+            if unique_base_name not in ModuleNameDict:
+                ModuleNameDict[unique_base_name] = []
+            ModuleNameDict[unique_base_name].append(Module.MetaFile)
+            if Module.BaseName not in UniqueName:
+                UniqueName[Module.BaseName] = set()
+            UniqueName[Module.BaseName].add((self.ModuleGuid(Module),Module.MetaFile))
+        for module_paths in ModuleNameDict.values():
+            if len(module_paths) > 1 and len(set(module_paths))>1:
+                samemodules = list(set(module_paths))
+                EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
                       ' %s\n %s' % (samemodules[0], samemodules[1]))
+        for name in UniqueName:
+            Guid_Path = UniqueName[name]
+            if len(Guid_Path) > 1:
+                retVal[name] = '%s_%s' % (name,Guid_Path.pop()[0])
         return retVal

     ## Expand * in build option key
     #
     # @param Options Options to be expanded
     # @param ToolDef Use specified ToolDef instead of full version.
diff --git a/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py b/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
index ab58b21772c3..4ad92653a238 100644
--- a/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
@@ -111,10 +111,12 @@ class WorkspaceAutoGen(AutoGen):
         self.ProcessModuleFromPdf()
         self.ProcessPcdType()
         self.ProcessMixedPcd()
         self.VerifyPcdsFromFDF()
         self.CollectAllPcds()
+        for Pa in self.AutoGenObjectList:
+            Pa.FillData_LibConstPcd()
         self.GeneratePkgLevelHash()
         #
         # Check PCDs token value conflict in each DEC file.
         #
         self._CheckAllPcdsTokenValueConflict()
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 603d3aa6dad4..dc92495f3f08 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -707,11 +707,11 @@ class Build():
         self.Fdf = BuildOptions.FdfFile
         self.FdList = BuildOptions.RomImage
         self.FvList = BuildOptions.FvImage
         self.CapList = BuildOptions.CapName
         self.SilentMode = BuildOptions.SilentMode
-        self.ThreadNumber = BuildOptions.ThreadNumber
+        self.ThreadNumber = 1
         self.SkipAutoGen = BuildOptions.SkipAutoGen
         self.Reparse = BuildOptions.Reparse
         self.SkuId = BuildOptions.SkuId
         if self.SkuId:
             GlobalData.gSKUID_CMD = self.SkuId
@@ -812,31 +812,32 @@ class Build():
         EdkLogger.info("")
         os.chdir(self.WorkspaceDir)
         self.share_data = Manager().dict()
         self.log_q = log_q
     def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,share_data):
-        if SkipAutoGen:
-            return
-        feedback_q = mp.Queue()
-        file_lock = mp.Lock()
-        auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,share_data,self.log_q) for _ in range(self.ThreadNumber)]
-        self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q)
-        self.AutoGenMgr.start()
-        for w in auto_workers:
-            w.start()
-        if PcdMaList is not None:
-            for PcdMa in PcdMaList:
-                PcdMa.CreateCodeFile(True)
-                PcdMa.CreateMakeFile(GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.File, PcdMa.Arch),[]))
-                PcdMa.CreateAsBuiltInf()
-        for w in auto_workers:
-            w.join()
-        rt = self.AutoGenMgr.Status
-        self.AutoGenMgr.kill()
-        self.AutoGenMgr.join()
-        self.AutoGenMgr = None
-        return rt
+        try:
+            if SkipAutoGen:
+                return True,0
+            feedback_q = mp.Queue()
+            file_lock = mp.Lock()
+            error_event = mp.Event()
+            auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,file_lock,share_data,self.log_q,error_event) for _ in range(self.ThreadNumber)]
+            self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)
+            self.AutoGenMgr.start()
+            for w in auto_workers:
+                w.start()
+            if PcdMaList is not None:
+                for PcdMa in PcdMaList:
+                    PcdMa.CreateCodeFile(True)
+                    PcdMa.CreateMakeFile(GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.File, PcdMa.Arch),[]))
+                    PcdMa.CreateAsBuiltInf()
+
+            self.AutoGenMgr.join()
+            rt = self.AutoGenMgr.Status
+            return rt, 0
+        except Exception as e:
+            return False,e.errcode

     ## Load configuration
     #
     # This method will parse target.txt and get the build configurations.
     #
@@ -880,23 +881,10 @@ class Build():
                     ToolChainFamily.append(TAB_COMPILER_MSFT)
                 else:
                     ToolChainFamily.append(ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool])
         self.ToolChainFamily = ToolChainFamily

-        if self.ThreadNumber is None:
-            self.ThreadNumber = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
-            if self.ThreadNumber == '':
-                self.ThreadNumber = 0
-            else:
-                self.ThreadNumber = int(self.ThreadNumber, 0)
-
-        if self.ThreadNumber == 0:
-            try:
-                self.ThreadNumber = multiprocessing.cpu_count()
-            except (ImportError, NotImplementedError):
-                self.ThreadNumber = 1
-
         if not self.PlatformFile:
             PlatformFile = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_ACTIVE_PLATFORM]
             if not PlatformFile:
                 # Try to find one in current directory
                 WorkingDirectory = os.getcwd()
@@ -911,10 +899,11 @@ class Build():
                 EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
                                 ExtraData="No active platform specified in target.txt or command line! Nothing can be built.\n")

            self.PlatformFile = PathClass(NormFile(PlatformFile, self.WorkspaceDir), self.WorkspaceDir)
+        self.ThreadNumber = ThreadNum()

     ## Initialize build configuration
     #
     # This method will parse DSC file and merge the configurations from
     # command line and target.txt, then get the final build configurations.
     #
@@ -1213,12 +1202,16 @@ class Build():
             AutoGenObject.DataPipe.DataContainer = {"FfsCommand":FfsCommand}
             self.Progress.Start("Generating makefile and code")
             data_pipe_file = os.path.join(AutoGenObject.BuildDir, "GlobalVar_%s_%s.bin" % (str(AutoGenObject.Guid),AutoGenObject.Arch))
             AutoGenObject.DataPipe.dump(data_pipe_file)
-            autogen_rt = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList,self.share_data)
+            autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList,self.share_data)
             self.Progress.Stop("done!")
+            if not autogen_rt:
+                self.AutoGenMgr.TerminateWorkers()
+                self.AutoGenMgr.join(0.1)
+                raise FatalError(errorcode)
             return autogen_rt
         else:
             # always recreate top/platform makefile when clean, just in case of inconsistency
             AutoGenObject.CreateCodeFile(False)
             AutoGenObject.CreateMakeFile(False)
@@ -1717,10 +1710,11 @@ class Build():
                     Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
                     if Ma is None:
                         continue
                     if Ma.PcdIsDriver:
                         Ma.PlatformInfo = Pa
+                        Ma.Workspace = Wa
                         PcdMaList.append(Ma)
                     self.BuildModules.append(Ma)
                 self._BuildPa(self.Target, Pa, FfsCommand=CmdListDict,PcdMaList=PcdMaList)

             # Create MAP file when Load Fix Address is enabled.
@@ -2045,14 +2039,15 @@ class Build():
                 mqueue = mp.Queue()
                 for m in Pa.GetAllModuleInfo:
                     mqueue.put(m)
                 data_pipe_file = os.path.join(Pa.BuildDir, "GlobalVar_%s_%s.bin" % (str(Pa.Guid),Pa.Arch))
                 Pa.DataPipe.dump(data_pipe_file)
-                autogen_rt = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList,self.share_data)
-
+                autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList,self.share_data)
                 if not autogen_rt:
-                    return
+                    self.AutoGenMgr.TerminateWorkers()
+                    self.AutoGenMgr.join(0.1)
+                    raise FatalError(errorcode)
                 self.AutoGenTime += int(round((time.time() - AutoGenStart)))
                 self.Progress.Stop("done!")
             for Arch in Wa.ArchList:
                 MakeStart = time.time()
                 for Ma in BuildModules:
@@ -2286,20 +2281,35 @@ def LogBuildTime(Time):
         TimeDurStr = time.strftime("%H:%M:%S", TimeDur)
         return TimeDurStr
     else:
         return None
+def ThreadNum():
+    ThreadNumber = BuildOption.ThreadNumber
+    if ThreadNumber is None:
+        ThreadNumber = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
+        if ThreadNumber == '':
+            ThreadNumber = 0
+        else:
+            ThreadNumber = int(ThreadNumber, 0)
+
+    if ThreadNumber == 0:
+        try:
+            ThreadNumber = multiprocessing.cpu_count()
+        except (ImportError, NotImplementedError):
+            ThreadNumber = 1
+    return ThreadNumber

 ## Tool entrance method
 #
 # This method mainly dispatch specific methods per the command line options.
 # If no error found, return zero value so the caller of this tool can know
 # if it's executed successfully or not.
 #
 # @retval 0 Tool was successful
 # @retval 1 Tool failed
 #
-LogQMaxSize = 60
+LogQMaxSize = ThreadNum() * 10

 def Main():
     StartTime = time.time()
     #
     # Create a log Queue
@@ -2432,13 +2442,11 @@ def Main():
         else:
             EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber,
                             ExtraData=X.Message, RaiseError=False)
         ReturnCode = FORMAT_INVALID
     except KeyboardInterrupt:
         if MyBuild is not None:
-            if MyBuild.AutoGenMgr:
-                MyBuild.AutoGenMgr.TerminateWorkers()
-                MyBuild.AutoGenMgr.kill()
+            # for multi-thread build exits safely
             MyBuild.Relinquish()
         ReturnCode = ABORT_ERROR
         if Option is not None and Option.debug is not None:
             EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
@@ -2495,10 +2503,11 @@ def Main():
     Log_Agent.kill()
     Log_Agent.join()
     return ReturnCode

 if __name__ == '__main__':
+    mp.set_start_method('spawn')
     r = Main()
     ## 0-127 is a safe return range, and 1 is a standard default error
     if r < 0 or r > 127:
         r = 1
     sys.exit(r)
-- 
2.20.1.windows.1