[PATCH v1 1/1] BaseTools: Remove equality operator with None
From: Jaben Carsey @ 2018-03-26 20:25 UTC
To: edk2-devel; +Cc: Yonghong Zhu, Liming Gao
replace "== None" with "is None" and "!= None" with "is not None"
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Cc: Liming Gao <liming.gao@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Jaben Carsey <jaben.carsey@intel.com>
---
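Background on why "is None" is preferred: "==" dispatches to the object's __eq__ method, which a class can override, while "is" compares object identity and cannot be overridden; PEP 8 recommends comparing to singletons such as None with "is"/"is not" for this reason. A minimal illustrative Python 2 sketch (the class below is hypothetical, invented only for this note; it does not appear in BaseTools):

    class _AlwaysEqual(object):
        # Overridden __eq__ makes equality comparisons against None unreliable.
        def __eq__(self, other):
            return True

    obj = _AlwaysEqual()
    print obj == None    # prints True  -- misleading answer from __eq__
    print obj is None    # prints False -- identity test, as used in this patch

For objects that keep the default __eq__, both spellings give the same answer, which is why the substitution can be applied mechanically across the tree.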
BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py | 20 +--
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py | 6 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py | 2 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py | 10 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py | 6 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py | 74 ++++----
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py | 2 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py | 32 ++--
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py | 32 ++--
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py | 2 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py | 12 +-
BaseTools/Source/Python/AutoGen/AutoGen.py | 176 +++++++++---------
BaseTools/Source/Python/AutoGen/BuildEngine.py | 10 +-
BaseTools/Source/Python/AutoGen/GenC.py | 4 +-
BaseTools/Source/Python/AutoGen/GenDepex.py | 10 +-
BaseTools/Source/Python/AutoGen/GenMake.py | 6 +-
BaseTools/Source/Python/AutoGen/GenPcdDb.py | 2 +-
BaseTools/Source/Python/AutoGen/IdfClassObject.py | 4 +-
BaseTools/Source/Python/AutoGen/StrGather.py | 6 +-
BaseTools/Source/Python/AutoGen/UniClassObject.py | 22 +--
BaseTools/Source/Python/BPDG/BPDG.py | 10 +-
BaseTools/Source/Python/BPDG/GenVpd.py | 4 +-
BaseTools/Source/Python/Common/DecClassObject.py | 2 +-
BaseTools/Source/Python/Common/Dictionary.py | 2 +-
BaseTools/Source/Python/Common/DscClassObject.py | 4 +-
BaseTools/Source/Python/Common/EdkIIWorkspace.py | 2 +-
BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py | 30 ++--
BaseTools/Source/Python/Common/EdkLogger.py | 18 +-
BaseTools/Source/Python/Common/FdfClassObject.py | 2 +-
BaseTools/Source/Python/Common/FdfParserLite.py | 40 ++---
BaseTools/Source/Python/Common/InfClassObject.py | 2 +-
BaseTools/Source/Python/Common/Misc.py | 80 ++++-----
BaseTools/Source/Python/Common/Parsing.py | 4 +-
BaseTools/Source/Python/Common/String.py | 4 +-
BaseTools/Source/Python/Common/TargetTxtClassObject.py | 6 +-
BaseTools/Source/Python/Common/ToolDefClassObject.py | 2 +-
BaseTools/Source/Python/Common/VpdInfoFile.py | 12 +-
BaseTools/Source/Python/CommonDataClass/CommonClass.py | 10 +-
BaseTools/Source/Python/Ecc/CParser.py | 12 +-
BaseTools/Source/Python/Ecc/Check.py | 2 +-
BaseTools/Source/Python/Ecc/CodeFragmentCollector.py | 4 +-
BaseTools/Source/Python/Ecc/Ecc.py | 30 ++--
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py | 4 +-
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py | 12 +-
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py | 14 +-
BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py | 10 +-
BaseTools/Source/Python/Ecc/c.py | 62 +++----
BaseTools/Source/Python/Eot/CParser.py | 12 +-
BaseTools/Source/Python/Eot/CodeFragmentCollector.py | 2 +-
BaseTools/Source/Python/Eot/Eot.py | 6 +-
BaseTools/Source/Python/Eot/FvImage.py | 42 ++---
BaseTools/Source/Python/Eot/InfParserLite.py | 2 +-
BaseTools/Source/Python/Eot/Report.py | 2 +-
BaseTools/Source/Python/GenFds/AprioriSection.py | 4 +-
BaseTools/Source/Python/GenFds/Capsule.py | 2 +-
BaseTools/Source/Python/GenFds/CompressSection.py | 8 +-
BaseTools/Source/Python/GenFds/DataSection.py | 4 +-
BaseTools/Source/Python/GenFds/DepexSection.py | 2 +-
BaseTools/Source/Python/GenFds/EfiSection.py | 26 +--
BaseTools/Source/Python/GenFds/FdfParser.py | 50 +++---
BaseTools/Source/Python/GenFds/FfsFileStatement.py | 12 +-
BaseTools/Source/Python/GenFds/FfsInfStatement.py | 48 ++---
BaseTools/Source/Python/GenFds/Fv.py | 32 ++--
BaseTools/Source/Python/GenFds/FvImageSection.py | 12 +-
BaseTools/Source/Python/GenFds/GenFds.py | 46 ++---
BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py | 28 +--
BaseTools/Source/Python/GenFds/GuidSection.py | 30 ++--
BaseTools/Source/Python/GenFds/OptRomFileStatement.py | 2 +-
BaseTools/Source/Python/GenFds/OptRomInfStatement.py | 16 +-
BaseTools/Source/Python/GenFds/OptionRom.py | 4 +-
BaseTools/Source/Python/GenFds/Region.py | 10 +-
BaseTools/Source/Python/GenFds/Section.py | 8 +-
BaseTools/Source/Python/GenFds/UiSection.py | 6 +-
BaseTools/Source/Python/GenFds/VerSection.py | 6 +-
BaseTools/Source/Python/GenFds/Vtf.py | 4 +-
BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py | 24 +--
BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py | 4 +-
BaseTools/Source/Python/TargetTool/TargetTool.py | 28 +--
BaseTools/Source/Python/Trim/Trim.py | 26 +--
BaseTools/Source/Python/UPT/Core/DependencyRules.py | 4 +-
BaseTools/Source/Python/UPT/Core/IpiDb.py | 30 ++--
BaseTools/Source/Python/UPT/Core/PackageFile.py | 2 +-
BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py | 6 +-
BaseTools/Source/Python/UPT/InstallPkg.py | 2 +-
| 12 +-
BaseTools/Source/Python/UPT/Library/Misc.py | 10 +-
BaseTools/Source/Python/UPT/Library/ParserValidate.py | 30 ++--
BaseTools/Source/Python/UPT/Library/Parsing.py | 10 +-
BaseTools/Source/Python/UPT/Library/String.py | 8 +-
BaseTools/Source/Python/UPT/Library/UniClassObject.py | 22 +--
BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py | 10 +-
BaseTools/Source/Python/UPT/Logger/Log.py | 18 +-
BaseTools/Source/Python/UPT/MkPkg.py | 2 +-
BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py | 12 +-
BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py | 46 ++---
BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py | 6 +-
| 10 +-
BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py | 4 +-
BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py | 10 +-
BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py | 6 +-
BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py | 6 +-
BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfParser.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfParserMisc.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py | 4 +-
BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py | 18 +-
BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py | 18 +-
BaseTools/Source/Python/UPT/UPT.py | 2 +-
BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py | 2 +-
BaseTools/Source/Python/UPT/Xml/XmlParser.py | 38 ++--
BaseTools/Source/Python/Workspace/BuildClassObject.py | 2 +-
BaseTools/Source/Python/Workspace/DecBuildData.py | 36 ++--
BaseTools/Source/Python/Workspace/DscBuildData.py | 158 ++++++++--------
BaseTools/Source/Python/Workspace/InfBuildData.py | 190 ++++++++++----------
BaseTools/Source/Python/Workspace/MetaDataTable.py | 4 +-
BaseTools/Source/Python/Workspace/MetaFileParser.py | 10 +-
BaseTools/Source/Python/Workspace/MetaFileTable.py | 16 +-
BaseTools/Source/Python/Workspace/WorkspaceCommon.py | 12 +-
BaseTools/Source/Python/Workspace/WorkspaceDatabase.py | 6 +-
BaseTools/Source/Python/build/BuildReport.py | 38 ++--
BaseTools/Source/Python/build/build.py | 82 ++++-----
131 files changed, 1142 insertions(+), 1142 deletions(-)
diff --git a/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py b/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
index 92ee69978277..557ffa4505e4 100644
--- a/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
+++ b/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
@@ -49,18 +49,18 @@ def parseCmdArgs():
# validate the options
errors = []
- if options.WorkspacePath == None:
+ if options.WorkspacePath is None:
errors.append('- Please specify workspace path via option -w!')
elif not os.path.exists(options.WorkspacePath):
errors.append("- Invalid workspace path %s! The workspace path should be exist in absolute path!" % options.WorkspacePath)
- if options.PackagePath == None:
+ if options.PackagePath is None:
errors.append('- Please specify package DEC file path via option -p!')
elif not os.path.exists(options.PackagePath):
errors.append("- Invalid package's DEC file path %s! The DEC path should be exist in absolute path!" % options.PackagePath)
default = "C:\\Program Files\\doxygen\\bin\\doxygen.exe"
- if options.DoxygenPath == None:
+ if options.DoxygenPath is None:
if os.path.exists(default):
print "Warning: Assume doxygen tool is installed at %s. If not, please specify via -x" % default
options.DoxygenPath = default
@@ -69,7 +69,7 @@ def parseCmdArgs():
elif not os.path.exists(options.DoxygenPath):
errors.append("- Invalid doxygen tool path %s! The doxygen tool path should be exist in absolute path!" % options.DoxygenPath)
- if options.OutputPath != None:
+ if options.OutputPath is not None:
if not os.path.exists(options.OutputPath):
# create output
try:
@@ -77,7 +77,7 @@ def parseCmdArgs():
except:
errors.append('- Fail to create the output directory %s' % options.OutputPath)
else:
- if options.PackagePath != None and os.path.exists(options.PackagePath):
+ if options.PackagePath is not None and os.path.exists(options.PackagePath):
dirpath = os.path.dirname(options.PackagePath)
default = os.path.join (dirpath, "Document")
print 'Warning: Assume document output at %s. If not, please specify via option -o' % default
@@ -90,21 +90,21 @@ def parseCmdArgs():
else:
errors.append('- Please specify document output path via option -o!')
- if options.Arch == None:
+ if options.Arch is None:
options.Arch = 'ALL'
print "Warning: Assume arch is \"ALL\". If not, specify via -a"
- if options.DocumentMode == None:
+ if options.DocumentMode is None:
options.DocumentMode = "HTML"
print "Warning: Assume document mode is \"HTML\". If not, specify via -m"
- if options.IncludeOnly == None:
+ if options.IncludeOnly is None:
options.IncludeOnly = False
print "Warning: Assume generate package document for all package\'s source including publich interfaces and implementation libraries and modules."
if options.DocumentMode.lower() == 'chm':
default = "C:\\Program Files\\HTML Help Workshop\\hhc.exe"
- if options.HtmlWorkshopPath == None:
+ if options.HtmlWorkshopPath is None:
if os.path.exists(default):
print 'Warning: Assume the installation path of Microsoft HTML Workshop is %s. If not, specify via option -c.' % default
options.HtmlWorkshopPath = default
@@ -382,7 +382,7 @@ if __name__ == '__main__':
# create package model object firstly
pkgObj = createPackageObject(wspath, pkgpath)
- if pkgObj == None:
+ if pkgObj is None:
sys.exit(-1)
# create doxygen action model
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
index 2d0cc9d96e1f..488949f24b6f 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
@@ -58,7 +58,7 @@ class Page(BaseDoxygeItem):
return subpage
def AddPages(self, pageArray):
- if pageArray == None:
+ if pageArray is None:
return
for page in pageArray:
self.AddPage(page)
@@ -370,7 +370,7 @@ class DoxygenConfigFile:
self.mWarningFile = str.replace('\\', '/')
def FileExists(self, path):
- if path == None:
+ if path is None:
return False
if len(path) == 0:
return False
@@ -382,7 +382,7 @@ class DoxygenConfigFile:
return False
def AddFile(self, path):
- if path == None:
+ if path is None:
return
if len(path) == 0:
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
index 72becedb8e4e..9db16a63c07a 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
@@ -553,7 +553,7 @@ class EfiFvMapFile(object):
if line[0] != ' ':
# new entry
ret = rMapEntry.match(line)
- if ret != None:
+ if ret is not None:
name = ret.groups()[0]
baseaddr = int(ret.groups()[1], 16)
entry = int(ret.groups()[2], 16)
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
index 515e7a4fa7dd..bf1040d6bac4 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
@@ -34,7 +34,7 @@ class BaseINIFile(object):
if key not in cls._objs.keys():
cls._objs[key] = object.__new__(cls, *args, **kwargs)
- if parent != None:
+ if parent is not None:
cls._objs[key].AddParent(parent)
return cls._objs[key]
@@ -47,7 +47,7 @@ class BaseINIFile(object):
self._isModify = True
def AddParent(self, parent):
- if parent == None: return
+ if parent is None: return
if not hasattr(self, "_parents"):
self._parents = []
@@ -122,7 +122,7 @@ class BaseINIFile(object):
continue
m = section_re.match(templine)
- if m!= None: # found a section
+ if m is not None: # found a section
inGlobal = False
# Finish the latest section first
if len(sObjs) != 0:
@@ -165,7 +165,7 @@ class BaseINIFile(object):
def Destroy(self, parent):
# check referenced parent
- if parent != None:
+ if parent is not None:
assert parent in self._parents, "when destory ini object, can not found parent reference!"
self._parents.remove(parent)
@@ -307,7 +307,7 @@ class BaseINISection(object):
visit += 1
continue
line = line.split('#')[0].strip()
- if iniObj != None:
+ if iniObj is not None:
if line.endswith('}'):
iniObj._end = visit - self._start
if not iniObj.Parse():
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
index 5cf202857376..51de5cb74e3c 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
@@ -35,14 +35,14 @@ def WarnMsg(mess, fName=None, fNo=None):
def NormalMessage(type, mess, fName=None, fNo=None):
strMsg = type
- if fName != None:
+ if fName is not None:
strMsg += ' %s' % fName.replace('/', '\\')
- if fNo != None:
+ if fNo is not None:
strMsg += '(%d):' % fNo
else:
strMsg += ' :'
- if fName == None and fNo == None:
+ if fName is None and fNo is None:
strMsg += ' '
strMsg += mess
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
index 05fa2529be2d..7c120d85c255 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
@@ -74,7 +74,7 @@ class SurfaceObject(object):
def Load(self, relativePath):
# if has been loaded, directly return
- if self._fileObj != None: return True
+ if self._fileObj is not None: return True
relativePath = os.path.normpath(relativePath)
fullPath = os.path.join(self._workspace, relativePath)
@@ -160,7 +160,7 @@ class Platform(SurfaceObject):
return dsc.DSCFile
def GetModuleCount(self):
- if self.GetFileObj() == None:
+ if self.GetFileObj() is None:
ErrorMsg("Fail to get module count because DSC file has not been load!")
return len(self.GetFileObj().GetComponents())
@@ -171,7 +171,7 @@ class Platform(SurfaceObject):
def LoadModules(self, precallback=None, postcallback=None):
for obj in self.GetFileObj().GetComponents():
mFilename = obj.GetFilename()
- if precallback != None:
+ if precallback is not None:
precallback(self, mFilename)
arch = obj.GetArch()
if arch.lower() == 'common':
@@ -182,7 +182,7 @@ class Platform(SurfaceObject):
module = Module(self, self.GetWorkspace())
if module.Load(mFilename, arch, obj.GetOveridePcds(), obj.GetOverideLibs()):
self._modules.append(module)
- if postcallback != None:
+ if postcallback is not None:
postcallback(self, module)
else:
del module
@@ -222,7 +222,7 @@ class Platform(SurfaceObject):
for obj in objs:
if obj.GetPcdName().lower() == name.lower():
arr.append(obj)
- if arch != None:
+ if arch is not None:
arr = self.FilterObjsByArch(arr, arch)
return arr
@@ -292,7 +292,7 @@ class Platform(SurfaceObject):
newSect = newDsc.AddNewSection(oldSect.GetName())
for oldComObj in oldSect.GetObjects():
module = self.GetModuleObject(oldComObj.GetFilename(), oldSect.GetArch())
- if module == None: continue
+ if module is None: continue
newComObj = dsc.DSCComponentObject(newSect)
newComObj.SetFilename(oldComObj.GetFilename())
@@ -300,7 +300,7 @@ class Platform(SurfaceObject):
# add all library instance for override section
libdict = module.GetLibraries()
for libclass in libdict.keys():
- if libdict[libclass] != None:
+ if libdict[libclass] is not None:
newComObj.AddOverideLib(libclass, libdict[libclass].GetRelativeFilename().replace('\\', '/'))
# add all pcds for override section
@@ -338,7 +338,7 @@ class Module(SurfaceObject):
def Destroy(self):
for lib in self._libs.values():
- if lib != None:
+ if lib is not None:
lib.Destroy()
self._libs.clear()
@@ -351,12 +351,12 @@ class Module(SurfaceObject):
del self._ppis[:]
for protocol in self._protocols:
- if protocol != None:
+ if protocol is not None:
protocol.DeRef(self)
del self._protocols[:]
for guid in self._guids:
- if guid != None:
+ if guid is not None:
guid.DeRef(self)
del self._guids[:]
@@ -375,9 +375,9 @@ class Module(SurfaceObject):
return False
self._arch = arch
- if overidePcds != None:
+ if overidePcds is not None:
self._overideLibs = overideLibs
- if overideLibs != None:
+ if overideLibs is not None:
self._overidePcds = overidePcds
self._SearchLibraries()
@@ -403,7 +403,7 @@ class Module(SurfaceObject):
def GetPcds(self):
pcds = self._pcds.copy()
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
for name in lib._pcds.keys():
pcds[name] = lib._pcds[name]
return pcds
@@ -412,7 +412,7 @@ class Module(SurfaceObject):
ppis = []
ppis += self._ppis
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
ppis += lib._ppis
return ppis
@@ -420,7 +420,7 @@ class Module(SurfaceObject):
pros = []
pros = self._protocols
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
pros += lib._protocols
return pros
@@ -428,7 +428,7 @@ class Module(SurfaceObject):
guids = []
guids += self._guids
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
guids += lib._guids
return guids
@@ -436,12 +436,12 @@ class Module(SurfaceObject):
deps = []
deps += self._depexs
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
deps += lib._depexs
return deps
def IsLibrary(self):
- return self.GetFileObj().GetDefine("LIBRARY_CLASS") != None
+ return self.GetFileObj().GetDefine("LIBRARY_CLASS") is not None
def GetLibraryInstance(self, classname, arch, type):
if classname not in self._libs.keys():
@@ -454,7 +454,7 @@ class Module(SurfaceObject):
parent = self.GetParent()
if issubclass(parent.__class__, Platform):
path = parent.GetLibraryPath(classname, arch, type)
- if path == None:
+ if path is None:
ErrorMsg('Fail to get library instance for %s' % classname, self.GetFilename())
return None
self._libs[classname] = Library(self, self.GetWorkspace())
@@ -477,7 +477,7 @@ class Module(SurfaceObject):
continue
classname = obj.GetClass()
instance = self.GetLibraryInstance(classname, arch, type)
- if not self.IsLibrary() and instance != None:
+ if not self.IsLibrary() and instance is not None:
instance._isInherit = False
if classname not in self._libs.keys():
@@ -490,7 +490,7 @@ class Module(SurfaceObject):
pros = []
deps = []
guids = []
- if self.GetFileObj() != None:
+ if self.GetFileObj() is not None:
pcds = self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('pcd'),
self.GetArch())
for pcd in pcds:
@@ -534,31 +534,31 @@ class Module(SurfaceObject):
objs = self.GetFileObj().GetSectionObjectsByName('packages')
for obj in objs:
package = self.GetPlatform().GetPackage(obj.GetPath())
- if package != None:
+ if package is not None:
self._packages.append(package)
def GetPackages(self):
return self._packages
def GetPcdObjects(self):
- if self.GetFileObj() == None:
+ if self.GetFileObj() is None:
return []
return self.GetFileObj().GetSectionObjectsByName('pcd')
def GetLibraryClassHeaderFilePath(self):
lcname = self.GetFileObj().GetProduceLibraryClass()
- if lcname == None: return None
+ if lcname is None: return None
pkgs = self.GetPackages()
for package in pkgs:
path = package.GetLibraryClassHeaderPathByName(lcname)
- if path != None:
+ if path is not None:
return os.path.realpath(os.path.join(package.GetFileObj().GetPackageRootPath(), path))
return None
def Reload(self, force=False, callback=None):
- if callback != None:
+ if callback is not None:
callback(self, "Starting reload...")
ret = SurfaceObject.Reload(self, force)
@@ -568,7 +568,7 @@ class Module(SurfaceObject):
return True
for lib in self._libs.values():
- if lib != None:
+ if lib is not None:
lib.Destroy()
self._libs.clear()
@@ -591,13 +591,13 @@ class Module(SurfaceObject):
del self._packages[:]
del self._depexs[:]
- if callback != None:
+ if callback is not None:
callback(self, "Searching libraries...")
self._SearchLibraries()
- if callback != None:
+ if callback is not None:
callback(self, "Searching packages...")
self._SearchPackage()
- if callback != None:
+ if callback is not None:
callback(self, "Searching surface items...")
self._SearchSurfaceItems()
@@ -665,16 +665,16 @@ class Package(SurfaceObject):
def Destroy(self):
for pcd in self._pcds.values():
- if pcd != None:
+ if pcd is not None:
pcd.Destroy()
for guid in self._guids.values():
- if guid != None:
+ if guid is not None:
guid.Destroy()
for protocol in self._protocols.values():
- if protocol != None:
+ if protocol is not None:
protocol.Destroy()
for ppi in self._ppis.values():
- if ppi != None:
+ if ppi is not None:
ppi.Destroy()
self._pcds.clear()
self._guids.clear()
@@ -689,7 +689,7 @@ class Package(SurfaceObject):
pcds = self.GetFileObj().GetSectionObjectsByName('pcds')
for pcd in pcds:
if pcd.GetPcdName() in self._pcds.keys():
- if self._pcds[pcd.GetPcdName()] != None:
+ if self._pcds[pcd.GetPcdName()] is not None:
self._pcds[pcd.GetPcdName()].AddDecObj(pcd)
else:
self._pcds[pcd.GetPcdName()] = PcdItem(pcd.GetPcdName(), self, pcd)
@@ -726,7 +726,7 @@ class Package(SurfaceObject):
def GetPcdDefineObjs(self, name=None):
arr = []
objs = self.GetFileObj().GetSectionObjectsByName('pcds')
- if name == None: return objs
+ if name is None: return objs
for obj in objs:
if obj.GetPcdName().lower() == name.lower():
@@ -772,7 +772,7 @@ class ModulePcd(object):
def __init__(self, parent, name, infObj, pcdItem):
assert issubclass(parent.__class__, Module), "Module's PCD's parent must be module!"
- assert pcdItem != None, 'Pcd %s does not in some package!' % name
+ assert pcdItem is not None, 'Pcd %s does not in some package!' % name
self._name = name
self._parent = parent
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
index 3bd0b7b58795..9ff0df385154 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
@@ -77,7 +77,7 @@ class DECSection(ini.BaseINISection):
return arr[1]
def IsArchMatch(self, arch):
- if arch == None or self.GetArch() == 'common':
+ if arch is None or self.GetArch() == 'common':
return True
if self.GetArch().lower() != arch.lower():
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
index 268ba5c3bdd0..94b6588c0ddf 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
@@ -69,7 +69,7 @@ class DoxygenAction:
self._chmCallback = None
def Log(self, message, level='info'):
- if self._log != None:
+ if self._log is not None:
self._log(message, level)
def IsVerbose(self):
@@ -94,7 +94,7 @@ class DoxygenAction:
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
indexPagePath = self.GenerateIndexPage()
- if indexPagePath == None:
+ if indexPagePath is None:
self.Log("Fail to generate index page!\n", 'error')
return False
else:
@@ -109,7 +109,7 @@ class DoxygenAction:
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
# launch doxygen tool to generate document
- if self._doxygenCallback != None:
+ if self._doxygenCallback is not None:
self.Log(" >>>>>> Start doxygen process...Zzz...\n")
if not self._doxygenCallback(self._doxPath, configFilePath):
return False
@@ -166,9 +166,9 @@ class PackageDocumentAction(DoxygenAction):
self._configFile.AddPreDefined('MDE_CPU_ARM')
namestr = self._pObj.GetName()
- if self._arch != None:
+ if self._arch is not None:
namestr += '[%s]' % self._arch
- if self._tooltag != None:
+ if self._tooltag is not None:
namestr += '[%s]' % self._tooltag
self._configFile.SetProjectName(namestr)
self._configFile.SetStripPath(self._pObj.GetWorkspace())
@@ -314,7 +314,7 @@ class PackageDocumentAction(DoxygenAction):
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
classPage = doxygen.Page(obj.GetClassName(),
"lc_%s" % obj.GetClassName())
@@ -399,7 +399,7 @@ class PackageDocumentAction(DoxygenAction):
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
filePath = mo.groups()[0]
- if filePath == None or len(filePath) == 0:
+ if filePath is None or len(filePath) == 0:
continue
# find header file in module's path firstly.
@@ -417,7 +417,7 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(incPath):
fullPath = incPath
break
- if infObj != None:
+ if infObj is not None:
pkgInfObjs = infObj.GetSectionObjectsByName('packages')
for obj in pkgInfObjs:
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
@@ -433,10 +433,10 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(os.path.join(incPath, filePath)):
fullPath = os.path.join(os.path.join(incPath, filePath))
break
- if fullPath != None:
+ if fullPath is not None:
break
- if fullPath == None and self.IsVerbose():
+ if fullPath is None and self.IsVerbose():
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
return
else:
@@ -477,7 +477,7 @@ class PackageDocumentAction(DoxygenAction):
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
typeRoot = typeRootPageDict[obj.GetPcdType()]
- if self._arch != None:
+ if self._arch is not None:
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
@@ -573,7 +573,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('GUID', 'guid_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
else:
@@ -626,7 +626,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PPI', 'ppi_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
else:
@@ -680,7 +680,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
else:
@@ -773,7 +773,7 @@ class PackageDocumentAction(DoxygenAction):
if not infObj.Parse():
self.Log('Fail to load INF file %s' % inf)
continue
- if infObj.GetProduceLibraryClass() != None:
+ if infObj.GetProduceLibraryClass() is not None:
libObjs.append(infObj)
else:
modObjs.append(infObj)
@@ -951,7 +951,7 @@ class PackageDocumentAction(DoxygenAction):
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
workspace,
refDecObjs)
- if retarr != None:
+ if retarr is not None:
pkgname, hPath = retarr
else:
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
index 876da1327b26..ca55929eda9a 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
@@ -66,7 +66,7 @@ class DoxygenAction:
self._chmCallback = None
def Log(self, message, level='info'):
- if self._log != None:
+ if self._log is not None:
self._log(message, level)
def IsVerbose(self):
@@ -91,7 +91,7 @@ class DoxygenAction:
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
indexPagePath = self.GenerateIndexPage()
- if indexPagePath == None:
+ if indexPagePath is None:
self.Log("Fail to generate index page!\n", 'error')
return False
else:
@@ -106,7 +106,7 @@ class DoxygenAction:
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
# launch doxygen tool to generate document
- if self._doxygenCallback != None:
+ if self._doxygenCallback is not None:
self.Log(" >>>>>> Start doxygen process...Zzz...\n")
if not self._doxygenCallback(self._doxPath, configFilePath):
return False
@@ -167,9 +167,9 @@ class PackageDocumentAction(DoxygenAction):
self._configFile.AddPreDefined(macro)
namestr = self._pObj.GetName()
- if self._arch != None:
+ if self._arch is not None:
namestr += '[%s]' % self._arch
- if self._tooltag != None:
+ if self._tooltag is not None:
namestr += '[%s]' % self._tooltag
self._configFile.SetProjectName(namestr)
self._configFile.SetStripPath(self._pObj.GetWorkspace())
@@ -315,7 +315,7 @@ class PackageDocumentAction(DoxygenAction):
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
classPage = doxygen.Page(obj.GetClassName(),
"lc_%s" % obj.GetClassName())
@@ -401,7 +401,7 @@ class PackageDocumentAction(DoxygenAction):
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
filePath = mo.groups()[0]
- if filePath == None or len(filePath) == 0:
+ if filePath is None or len(filePath) == 0:
continue
# find header file in module's path firstly.
@@ -419,7 +419,7 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(incPath):
fullPath = incPath
break
- if infObj != None:
+ if infObj is not None:
pkgInfObjs = infObj.GetSectionObjectsByName('packages')
for obj in pkgInfObjs:
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
@@ -435,10 +435,10 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(os.path.join(incPath, filePath)):
fullPath = os.path.join(os.path.join(incPath, filePath))
break
- if fullPath != None:
+ if fullPath is not None:
break
- if fullPath == None and self.IsVerbose():
+ if fullPath is None and self.IsVerbose():
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
return
else:
@@ -479,7 +479,7 @@ class PackageDocumentAction(DoxygenAction):
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
typeRoot = typeRootPageDict[obj.GetPcdType()]
- if self._arch != None:
+ if self._arch is not None:
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
@@ -575,7 +575,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('GUID', 'guid_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
else:
@@ -628,7 +628,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PPI', 'ppi_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
else:
@@ -682,7 +682,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
else:
@@ -775,7 +775,7 @@ class PackageDocumentAction(DoxygenAction):
if not infObj.Parse():
self.Log('Fail to load INF file %s' % inf)
continue
- if infObj.GetProduceLibraryClass() != None:
+ if infObj.GetProduceLibraryClass() is not None:
libObjs.append(infObj)
else:
modObjs.append(infObj)
@@ -954,7 +954,7 @@ class PackageDocumentAction(DoxygenAction):
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
workspace,
refDecObjs)
- if retarr != None:
+ if retarr is not None:
pkgname, hPath = retarr
else:
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
index f8ed5315618c..0628fa740826 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
@@ -189,7 +189,7 @@ class DSCComponentObject(DSCSectionObject):
lines.append(' <%s>\n' % key)
for name, value in self._OveridePcds[key]:
- if value != None:
+ if value is not None:
lines.append(' %s|%s\n' % (name, value))
else:
lines.append(' %s\n' % name)
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
index 9d70fbcf97db..32b26850e766 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
@@ -23,7 +23,7 @@ class INFFile(ini.BaseINIFile):
def GetProduceLibraryClass(self):
obj = self.GetDefine("LIBRARY_CLASS")
- if obj == None: return None
+ if obj is None: return None
return obj.split('|')[0].strip()
@@ -59,7 +59,7 @@ class INFFile(ini.BaseINIFile):
if not ini.BaseINIFile.Parse(self):
return False
classname = self.GetProduceLibraryClass()
- if classname != None:
+ if classname is not None:
libobjdict = INFFile._libobjs
if libobjdict.has_key(classname):
if self not in libobjdict[classname]:
@@ -77,7 +77,7 @@ class INFFile(ini.BaseINIFile):
def Clear(self):
classname = self.GetProduceLibraryClass()
- if classname != None:
+ if classname is not None:
libobjdict = INFFile._libobjs
libobjdict[classname].remove(self)
if len(libobjdict[classname]) == 0:
@@ -114,7 +114,7 @@ class INFSection(ini.BaseINISection):
return arr[1]
def IsArchMatch(self, arch):
- if arch == None or self.GetArch() == 'common':
+ if arch is None or self.GetArch() == 'common':
return True
if self.GetArch().lower() != arch.lower():
@@ -258,9 +258,9 @@ class INFSourceObject(INFSectionObject):
del objdict[self.mFilename]
def IsMatchFamily(self, family):
- if family == None:
+ if family is None:
return True
- if self.mFamily != None:
+ if self.mFamily is not None:
if family.strip().lower() == self.mFamily.lower():
return True
else:
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 95e3e912b168..e54c8e66f3ad 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -765,7 +765,7 @@ class WorkspaceAutoGen(AutoGen):
for Fv in Fdf.Profile.FvDict:
_GuidDict = {}
for FfsFile in Fdf.Profile.FvDict[Fv].FfsList:
- if FfsFile.InfFileName and FfsFile.NameGuid == None:
+ if FfsFile.InfFileName and FfsFile.NameGuid is None:
#
# Get INF file GUID
#
@@ -816,7 +816,7 @@ class WorkspaceAutoGen(AutoGen):
ExtraData=self.FdfFile)
InfFoundFlag = False
- if FfsFile.NameGuid != None:
+ if FfsFile.NameGuid is not None:
_CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")
#
@@ -938,13 +938,13 @@ class WorkspaceAutoGen(AutoGen):
## Return the directory to store FV files
def _GetFvDir(self):
- if self._FvDir == None:
+ if self._FvDir is None:
self._FvDir = path.join(self.BuildDir, 'FV')
return self._FvDir
## Return the directory to store all intermediate and final files built
def _GetBuildDir(self):
- if self._BuildDir == None:
+ if self._BuildDir is None:
return self.AutoGenObjectList[0].BuildDir
## Return the build output directory platform specifies
@@ -972,7 +972,7 @@ class WorkspaceAutoGen(AutoGen):
# @retval string Makefile directory
#
def _GetMakeFileDir(self):
- if self._MakeFileDir == None:
+ if self._MakeFileDir is None:
self._MakeFileDir = self.BuildDir
return self._MakeFileDir
@@ -981,7 +981,7 @@ class WorkspaceAutoGen(AutoGen):
# @retval string Build command string
#
def _GetBuildCommand(self):
- if self._BuildCommand == None:
+ if self._BuildCommand is None:
# BuildCommand should be all the same. So just get one from platform AutoGen
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
return self._BuildCommand
@@ -1209,7 +1209,7 @@ class PlatformAutoGen(AutoGen):
self.VariableInfo = None
- if GlobalData.gFdfParser != None:
+ if GlobalData.gFdfParser is not None:
self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
for Inf in self._AsBuildInfList:
InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
@@ -1325,7 +1325,7 @@ class PlatformAutoGen(AutoGen):
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
- if SkuId == None or SkuId == '':
+ if SkuId is None or SkuId == '':
continue
if len(Sku.VariableName) > 0:
VariableGuidStructure = Sku.VariableGuidValue
@@ -1636,7 +1636,7 @@ class PlatformAutoGen(AutoGen):
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
NeedProcessVpdMapFile = True
- if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
+ if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
EdkLogger.error("Build", FILE_NOT_FOUND, \
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
@@ -1648,7 +1648,7 @@ class PlatformAutoGen(AutoGen):
for DscPcd in PlatformPcds:
DscPcdEntry = self._PlatformPcds[DscPcd]
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
FoundFlag = False
for VpdPcd in VpdFile._VpdArray.keys():
# This PCD has been referenced by module
@@ -1728,7 +1728,7 @@ class PlatformAutoGen(AutoGen):
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
VpdSkuMap[DscPcd] = SkuValueMap
- if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
+ if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
VpdFile.GetCount() != 0:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
@@ -1811,14 +1811,14 @@ class PlatformAutoGen(AutoGen):
BPDGToolName = ToolDef["PATH"]
break
# Call third party GUID BPDG tool.
- if BPDGToolName != None:
+ if BPDGToolName is not None:
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
## Return the platform build data object
def _GetPlatform(self):
- if self._Platform == None:
+ if self._Platform is None:
self._Platform = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
return self._Platform
@@ -1836,7 +1836,7 @@ class PlatformAutoGen(AutoGen):
## Return the FDF file name
def _GetFdfFile(self):
- if self._FdfFile == None:
+ if self._FdfFile is None:
if self.Workspace.FdfFile != "":
self._FdfFile= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
else:
@@ -1849,7 +1849,7 @@ class PlatformAutoGen(AutoGen):
## Return the directory to store all intermediate and final files built
def _GetBuildDir(self):
- if self._BuildDir == None:
+ if self._BuildDir is None:
if os.path.isabs(self.OutputDir):
self._BuildDir = path.join(
path.abspath(self.OutputDir),
@@ -1869,7 +1869,7 @@ class PlatformAutoGen(AutoGen):
# @retval string Makefile directory
#
def _GetMakeFileDir(self):
- if self._MakeFileDir == None:
+ if self._MakeFileDir is None:
self._MakeFileDir = path.join(self.BuildDir, self.Arch)
return self._MakeFileDir
@@ -1878,7 +1878,7 @@ class PlatformAutoGen(AutoGen):
# @retval string Build command string
#
def _GetBuildCommand(self):
- if self._BuildCommand == None:
+ if self._BuildCommand is None:
self._BuildCommand = []
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])
@@ -1900,7 +1900,7 @@ class PlatformAutoGen(AutoGen):
# Get each tool defition for given tool chain from tools_def.txt and platform
#
def _GetToolDefinition(self):
- if self._ToolDefinitions == None:
+ if self._ToolDefinitions is None:
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
@@ -1966,13 +1966,13 @@ class PlatformAutoGen(AutoGen):
## Return the paths of tools
def _GetToolDefFile(self):
- if self._ToolDefFile == None:
+ if self._ToolDefFile is None:
self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
return self._ToolDefFile
## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
def _GetToolChainFamily(self):
- if self._ToolChainFamily == None:
+ if self._ToolChainFamily is None:
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
@@ -1985,7 +1985,7 @@ class PlatformAutoGen(AutoGen):
return self._ToolChainFamily
def _GetBuildRuleFamily(self):
- if self._BuildRuleFamily == None:
+ if self._BuildRuleFamily is None:
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
@@ -1999,19 +1999,19 @@ class PlatformAutoGen(AutoGen):
## Return the build options specific for all modules in this platform
def _GetBuildOptions(self):
- if self._BuildOption == None:
+ if self._BuildOption is None:
self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)
return self._BuildOption
## Return the build options specific for EDK modules in this platform
def _GetEdkBuildOptions(self):
- if self._EdkBuildOption == None:
+ if self._EdkBuildOption is None:
self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
return self._EdkBuildOption
## Return the build options specific for EDKII modules in this platform
def _GetEdkIIBuildOptions(self):
- if self._EdkIIBuildOption == None:
+ if self._EdkIIBuildOption is None:
self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
return self._EdkIIBuildOption
@@ -2020,7 +2020,7 @@ class PlatformAutoGen(AutoGen):
# @retval BuildRule object
#
def _GetBuildRule(self):
- if self._BuildRule == None:
+ if self._BuildRule is None:
BuildRuleFile = None
if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
@@ -2040,7 +2040,7 @@ class PlatformAutoGen(AutoGen):
## Summarize the packages used by modules in this platform
def _GetPackageList(self):
- if self._PackageList == None:
+ if self._PackageList is None:
self._PackageList = set()
for La in self.LibraryAutoGenList:
self._PackageList.update(La.DependentPackageList)
@@ -2065,19 +2065,19 @@ class PlatformAutoGen(AutoGen):
## Get list of non-dynamic PCDs
def _GetNonDynamicPcdList(self):
- if self._NonDynamicPcdList == None:
+ if self._NonDynamicPcdList is None:
self.CollectPlatformDynamicPcds()
return self._NonDynamicPcdList
## Get list of dynamic PCDs
def _GetDynamicPcdList(self):
- if self._DynamicPcdList == None:
+ if self._DynamicPcdList is None:
self.CollectPlatformDynamicPcds()
return self._DynamicPcdList
## Generate Token Number for all PCD
def _GetPcdTokenNumbers(self):
- if self._PcdTokenNumber == None:
+ if self._PcdTokenNumber is None:
self._PcdTokenNumber = sdict()
TokenNumber = 1
#
@@ -2145,13 +2145,13 @@ class PlatformAutoGen(AutoGen):
## Summarize ModuleAutoGen objects of all modules to be built for this platform
def _GetModuleAutoGenList(self):
- if self._ModuleAutoGenList == None:
+ if self._ModuleAutoGenList is None:
self._GetAutoGenObjectList()
return self._ModuleAutoGenList
## Summarize ModuleAutoGen objects of all libraries to be built for this platform
def _GetLibraryAutoGenList(self):
- if self._LibraryAutoGenList == None:
+ if self._LibraryAutoGenList is None:
self._GetAutoGenObjectList()
return self._LibraryAutoGenList
@@ -2215,9 +2215,9 @@ class PlatformAutoGen(AutoGen):
LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
else:
LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
LibraryPath = M.LibraryClasses[LibraryClassName]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
"Instance of library class [%s] is not found" % LibraryClassName,
File=self.MetaFile,
@@ -2227,7 +2227,7 @@ class PlatformAutoGen(AutoGen):
# for those forced library instance (NULL library), add a fake library class
if LibraryClassName.startswith("NULL"):
LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
- elif LibraryModule.LibraryClass == None \
+ elif LibraryModule.LibraryClass is None \
or len(LibraryModule.LibraryClass) == 0 \
or (ModuleType != 'USER_DEFINED'
and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
@@ -2243,7 +2243,7 @@ class PlatformAutoGen(AutoGen):
else:
LibraryModule = LibraryInstance[LibraryClassName]
- if LibraryModule == None:
+ if LibraryModule is None:
continue
if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
@@ -2351,7 +2351,7 @@ class PlatformAutoGen(AutoGen):
if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
TokenCName = PcdItem[0]
break
- if FromPcd != None:
+ if FromPcd is not None:
if ToPcd.Pending and FromPcd.Type not in [None, '']:
ToPcd.Type = FromPcd.Type
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
@@ -2395,7 +2395,7 @@ class PlatformAutoGen(AutoGen):
ToPcd.validlists = FromPcd.validlists
ToPcd.expressions = FromPcd.expressions
- if FromPcd != None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
+ if FromPcd is not None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
% (ToPcd.TokenSpaceGuidCName, TokenCName))
Value = ToPcd.DefaultValue
@@ -2441,7 +2441,7 @@ class PlatformAutoGen(AutoGen):
Sku = PcdInModule.SkuInfoList[SkuId]
if Sku.VariableGuid == '': continue
Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
- if Sku.VariableGuidValue == None:
+ if Sku.VariableGuidValue is None:
PackageList = "\n\t".join([str(P) for P in self.PackageList])
EdkLogger.error(
'build',
@@ -2504,12 +2504,12 @@ class PlatformAutoGen(AutoGen):
M = LibraryConsumerList.pop()
for LibraryName in M.Libraries:
Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
- if Library == None:
+ if Library is None:
for Key in self.Platform.LibraryClasses.data.keys():
if LibraryName.upper() == Key.upper():
Library = self.Platform.LibraryClasses[Key, ':dummy:']
break
- if Library == None:
+ if Library is None:
EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
ExtraData="\t%s [%s]" % (str(Module), self.Arch))
continue
@@ -2564,13 +2564,13 @@ class PlatformAutoGen(AutoGen):
# Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
#
if (Key[0] == self.BuildRuleFamily and
- (ModuleStyle == None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
if Target == self.BuildTarget or Target == "*":
if ToolChain == self.ToolChain or ToolChain == "*":
if Arch == self.Arch or Arch == "*":
if Options[Key].startswith("="):
- if OverrideList.get(Key[1]) != None:
+ if OverrideList.get(Key[1]) is not None:
OverrideList.pop(Key[1])
OverrideList[Key[1]] = Options[Key]
@@ -2594,14 +2594,14 @@ class PlatformAutoGen(AutoGen):
if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
- if Options.get((self.BuildRuleFamily, NextKey)) != None:
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:
Options.pop((self.BuildRuleFamily, NextKey))
else:
- if Options.get((self.BuildRuleFamily, NowKey)) != None:
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:
Options.pop((self.BuildRuleFamily, NowKey))
for Key in Options:
- if ModuleStyle != None and len (Key) > 2:
+ if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
# Only append build option for the matched style module.
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
@@ -2638,7 +2638,7 @@ class PlatformAutoGen(AutoGen):
return BuildOptions
for Key in Options:
- if ModuleStyle != None and len (Key) > 2:
+ if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
# Only append build option for the matched style module.
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
@@ -2730,7 +2730,7 @@ class PlatformAutoGen(AutoGen):
BuildOptions[Tool][Attr] += " " + Value
else:
BuildOptions[Tool][Attr] = Value
- if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:
+ if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:
#
# Override UNI flag only for EDK module.
#
@@ -2927,7 +2927,7 @@ class ModuleAutoGen(AutoGen):
# Macros could be used in build_rule.txt (also Makefile)
def _GetMacros(self):
- if self._Macro == None:
+ if self._Macro is None:
self._Macro = sdict()
self._Macro["WORKSPACE" ] = self.WorkspaceDir
self._Macro["MODULE_NAME" ] = self.Name
@@ -2967,7 +2967,7 @@ class ModuleAutoGen(AutoGen):
## Return the module build data object
def _GetModule(self):
- if self._Module == None:
+ if self._Module is None:
self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
return self._Module
@@ -3023,8 +3023,8 @@ class ModuleAutoGen(AutoGen):
## Check if the module is library or not
def _IsLibrary(self):
- if self._LibraryFlag == None:
- if self.Module.LibraryClass != None and self.Module.LibraryClass != []:
+ if self._LibraryFlag is None:
+ if self.Module.LibraryClass is not None and self.Module.LibraryClass != []:
self._LibraryFlag = True
else:
self._LibraryFlag = False
@@ -3036,7 +3036,7 @@ class ModuleAutoGen(AutoGen):
## Return the directory to store intermediate files of the module
def _GetBuildDir(self):
- if self._BuildDir == None:
+ if self._BuildDir is None:
self._BuildDir = path.join(
self.PlatformInfo.BuildDir,
self.Arch,
@@ -3048,15 +3048,15 @@ class ModuleAutoGen(AutoGen):
## Return the directory to store the intermediate object files of the mdoule
def _GetOutputDir(self):
- if self._OutputDir == None:
+ if self._OutputDir is None:
self._OutputDir = path.join(self.BuildDir, "OUTPUT")
CreateDirectory(self._OutputDir)
return self._OutputDir
## Return the directory to store ffs file
def _GetFfsOutputDir(self):
- if self._FfsOutputDir == None:
- if GlobalData.gFdfParser != None:
+ if self._FfsOutputDir is None:
+ if GlobalData.gFdfParser is not None:
self._FfsOutputDir = path.join(self.PlatformInfo.BuildDir, "FV", "Ffs", self.Guid + self.Name)
else:
self._FfsOutputDir = ''
@@ -3064,21 +3064,21 @@ class ModuleAutoGen(AutoGen):
## Return the directory to store auto-gened source files of the mdoule
def _GetDebugDir(self):
- if self._DebugDir == None:
+ if self._DebugDir is None:
self._DebugDir = path.join(self.BuildDir, "DEBUG")
CreateDirectory(self._DebugDir)
return self._DebugDir
## Return the path of custom file
def _GetCustomMakefile(self):
- if self._CustomMakefile == None:
+ if self._CustomMakefile is None:
self._CustomMakefile = {}
for Type in self.Module.CustomMakefile:
if Type in gMakeTypeMap:
MakeType = gMakeTypeMap[Type]
else:
MakeType = 'nmake'
- if self.SourceOverrideDir != None:
+ if self.SourceOverrideDir is not None:
File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])
if not os.path.exists(File):
File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
@@ -3179,7 +3179,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The token list of the dependency expression after parsed
#
def _GetDepexTokenList(self):
- if self._DepexList == None:
+ if self._DepexList is None:
self._DepexList = {}
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
return self._DepexList
@@ -3215,7 +3215,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The token list of the dependency expression after parsed
#
def _GetDepexExpressionTokenList(self):
- if self._DepexExpressionList == None:
+ if self._DepexExpressionList is None:
self._DepexExpressionList = {}
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
return self._DepexExpressionList
@@ -3283,7 +3283,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The dict containing valid options
#
def _GetModuleBuildOption(self):
- if self._BuildOption == None:
+ if self._BuildOption is None:
self._BuildOption, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
if self.BuildRuleOrder:
self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
@@ -3294,7 +3294,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The include path list
#
def _GetBuildOptionIncPathList(self):
- if self._BuildOptionIncPathList == None:
+ if self._BuildOptionIncPathList is None:
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is that the former uses /I while the latter uses -I to specify include directories
@@ -3355,7 +3355,7 @@ class ModuleAutoGen(AutoGen):
# $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
#
def _GetSourceFileList(self):
- if self._SourceFileList == None:
+ if self._SourceFileList is None:
self._SourceFileList = []
for F in self.Module.Sources:
# match tool chain
@@ -3408,7 +3408,7 @@ class ModuleAutoGen(AutoGen):
## Return the list of unicode files
def _GetUnicodeFileList(self):
- if self._UnicodeFileList == None:
+ if self._UnicodeFileList is None:
if TAB_UNICODE_FILE in self.FileTypes:
self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]
else:
@@ -3417,7 +3417,7 @@ class ModuleAutoGen(AutoGen):
## Return the list of vfr files
def _GetVfrFileList(self):
- if self._VfrFileList == None:
+ if self._VfrFileList is None:
if TAB_VFR_FILE in self.FileTypes:
self._VfrFileList = self.FileTypes[TAB_VFR_FILE]
else:
@@ -3426,7 +3426,7 @@ class ModuleAutoGen(AutoGen):
## Return the list of Image Definition files
def _GetIdfFileList(self):
- if self._IdfFileList == None:
+ if self._IdfFileList is None:
if TAB_IMAGE_FILE in self.FileTypes:
self._IdfFileList = self.FileTypes[TAB_IMAGE_FILE]
else:
@@ -3440,7 +3440,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list of files which can be built later
#
def _GetBinaryFiles(self):
- if self._BinaryFileList == None:
+ if self._BinaryFileList is None:
self._BinaryFileList = []
for F in self.Module.Binaries:
if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:
@@ -3450,7 +3450,7 @@ class ModuleAutoGen(AutoGen):
return self._BinaryFileList
def _GetBuildRules(self):
- if self._BuildRules == None:
+ if self._BuildRules is None:
BuildRules = {}
BuildRuleDatabase = self.PlatformInfo.BuildRule
for Type in BuildRuleDatabase.FileTypeList:
@@ -3477,7 +3477,7 @@ class ModuleAutoGen(AutoGen):
return self._BuildRules
def _ApplyBuildRule(self, File, FileType):
- if self._BuildTargets == None:
+ if self._BuildTargets is None:
self._IntroBuildTargetList = set()
self._FinalBuildTargetList = set()
self._BuildTargets = {}
@@ -3502,7 +3502,7 @@ class ModuleAutoGen(AutoGen):
if Source != File:
CreateDirectory(Source.Dir)
- if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:
+ if File.IsBinary and File == Source and self._BinaryFileList is not None and File in self._BinaryFileList:
# Skip all files that are not binary libraries
if not self.IsLibrary:
continue
@@ -3554,7 +3554,7 @@ class ModuleAutoGen(AutoGen):
FileType = TAB_UNKNOWN_FILE
def _GetTargets(self):
- if self._BuildTargets == None:
+ if self._BuildTargets is None:
self._IntroBuildTargetList = set()
self._FinalBuildTargetList = set()
self._BuildTargets = {}
@@ -3601,7 +3601,7 @@ class ModuleAutoGen(AutoGen):
if self.BuildType == 'UEFI_HII':
UniStringAutoGenC = False
IdfStringAutoGenC = False
- if self._AutoGenFileList == None:
+ if self._AutoGenFileList is None:
self._AutoGenFileList = {}
AutoGenC = TemplateString()
AutoGenH = TemplateString()
@@ -3624,29 +3624,29 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
self._AutoGenFileList[AutoFile] = str(StringH)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer != None and UniStringBinBuffer.getvalue() != "":
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
self._AutoGenFileList[AutoFile] = UniStringBinBuffer.getvalue()
AutoFile.IsBinary = True
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer != None:
+ if UniStringBinBuffer is not None:
UniStringBinBuffer.close()
if str(StringIdf) != "":
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
self._AutoGenFileList[AutoFile] = str(StringIdf)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer != None and IdfGenBinBuffer.getvalue() != "":
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
self._AutoGenFileList[AutoFile] = IdfGenBinBuffer.getvalue()
AutoFile.IsBinary = True
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer != None:
+ if IdfGenBinBuffer is not None:
IdfGenBinBuffer.close()
return self._AutoGenFileList
## Return the list of library modules explicitly or implicitly used by this module
def _GetLibraryList(self):
- if self._DependentLibraryList == None:
+ if self._DependentLibraryList is None:
# only merge library classes and PCD for non-library module
if self.IsLibrary:
self._DependentLibraryList = []
@@ -3668,7 +3668,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list of PCD
#
def _GetModulePcdList(self):
- if self._ModulePcdList == None:
+ if self._ModulePcdList is None:
# apply PCD settings from platform
self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
self.UpdateComments(self._PcdComments, self.Module.PcdComments)
@@ -3679,7 +3679,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list of PCD
#
def _GetLibraryPcdList(self):
- if self._LibraryPcdList == None:
+ if self._LibraryPcdList is None:
Pcds = sdict()
if not self.IsLibrary:
# get PCDs from dependent libraries
@@ -3701,7 +3701,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The mapping between GUID cname and its value
#
def _GetGuidList(self):
- if self._GuidList == None:
+ if self._GuidList is None:
self._GuidList = sdict()
self._GuidList.update(self.Module.Guids)
for Library in self.DependentLibraryList:
@@ -3711,7 +3711,7 @@ class ModuleAutoGen(AutoGen):
return self._GuidList
def GetGuidsUsedByPcd(self):
- if self._GuidsUsedByPcd == None:
+ if self._GuidsUsedByPcd is None:
self._GuidsUsedByPcd = sdict()
self._GuidsUsedByPcd.update(self.Module.GetGuidsUsedByPcd())
for Library in self.DependentLibraryList:
@@ -3722,7 +3722,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The mapping between protocol cname and its value
#
def _GetProtocolList(self):
- if self._ProtocolList == None:
+ if self._ProtocolList is None:
self._ProtocolList = sdict()
self._ProtocolList.update(self.Module.Protocols)
for Library in self.DependentLibraryList:
@@ -3736,7 +3736,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The mapping between PPI cname and its value
#
def _GetPpiList(self):
- if self._PpiList == None:
+ if self._PpiList is None:
self._PpiList = sdict()
self._PpiList.update(self.Module.Ppis)
for Library in self.DependentLibraryList:
@@ -3750,7 +3750,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list path
#
def _GetIncludePathList(self):
- if self._IncludePathList == None:
+ if self._IncludePathList is None:
self._IncludePathList = []
if self.AutoGenVersion < 0x00010005:
for Inc in self.Module.Includes:
@@ -3942,7 +3942,7 @@ class ModuleAutoGen(AutoGen):
return
# Skip the following code for modules with no source files
- if self.SourceFileList == None or self.SourceFileList == []:
+ if self.SourceFileList is None or self.SourceFileList == []:
return
# Skip the following code for modules without any binary files
@@ -4157,7 +4157,7 @@ class ModuleAutoGen(AutoGen):
HexFormat = '0x%016x'
PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
else:
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
)
@@ -4437,7 +4437,7 @@ class ModuleAutoGen(AutoGen):
## Summarize the ModuleAutoGen objects of all libraries used by this module
def _GetLibraryAutoGenList(self):
- if self._LibraryAutoGenList == None:
+ if self._LibraryAutoGenList is None:
self._LibraryAutoGenList = []
for Library in self.DependentLibraryList:
La = ModuleAutoGen(
@@ -4525,7 +4525,7 @@ class ModuleAutoGen(AutoGen):
return True
def GetTimeStampPath(self):
- if self._TimeStampPath == None:
+ if self._TimeStampPath is None:
self._TimeStampPath = os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')
return self._TimeStampPath
def CreateTimeStamp(self, Makefile):
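
The hunks above all follow the same lazy-initialization pattern: a cached attribute starts as None and is filled on first access, so the guard only needs to know whether the object is the None singleton. PEP 8 recommends the identity form because "== None" is routed through the operand's __eq__, which a class may override, while "is None" always tests identity. A minimal, illustrative sketch (not taken from BaseTools):

class AlwaysEqual(object):
    def __eq__(self, other):
        return True                    # pathological but legal __eq__

obj = AlwaysEqual()
assert (obj == None) is True           # __eq__ decides; misleading answer
assert (obj is None) is False          # identity test; always reliable

cache = None
if cache is None:                      # the guard used throughout AutoGen.py
    cache = {"initialized": True}
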
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
index 63ed47d94bcb..0daed7da610d 100644
--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
+++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -346,12 +346,12 @@ class BuildRule:
def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]):
self.RuleFile = File
# Read build rules from file if it's not None
- if File != None:
+ if File is not None:
try:
self.RuleContent = open(File, 'r').readlines()
except:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
- elif Content != None:
+ elif Content is not None:
self.RuleContent = Content
else:
EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
@@ -478,7 +478,7 @@ class BuildRule:
EdkLogger.error("build", FORMAT_INVALID, "No file type given",
File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
- if self._FileTypePattern.match(FileType) == None:
+ if self._FileTypePattern.match(FileType) is None:
EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
# new format: File-Type.Build-Type.Arch
@@ -561,7 +561,7 @@ class BuildRule:
FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
for ToolChainFamily in self._FamilyList:
InputFiles = self._RuleInfo[ToolChainFamily, self._State]
- if InputFiles == None:
+ if InputFiles is None:
InputFiles = []
self._RuleInfo[ToolChainFamily, self._State] = InputFiles
InputFiles.extend(FileList)
@@ -573,7 +573,7 @@ class BuildRule:
def ParseCommon(self, LineIndex):
for ToolChainFamily in self._FamilyList:
Items = self._RuleInfo[ToolChainFamily, self._State]
- if Items == None:
+ if Items is None:
Items = []
self._RuleInfo[ToolChainFamily, self._State] = Items
Items.append(self.RuleContent[LineIndex])
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
index 481c4dda1447..cca6c8ab4fb5 100644
--- a/BaseTools/Source/Python/AutoGen/GenC.py
+++ b/BaseTools/Source/Python/AutoGen/GenC.py
@@ -1085,7 +1085,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if not Value.endswith('U'):
Value += 'U'
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN']:
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
@@ -1122,7 +1122,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN', 'VOID*']:
# handle structure PCD
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
diff --git a/BaseTools/Source/Python/AutoGen/GenDepex.py b/BaseTools/Source/Python/AutoGen/GenDepex.py
index 7aa22bd944a0..9acea8f6bfed 100644
--- a/BaseTools/Source/Python/AutoGen/GenDepex.py
+++ b/BaseTools/Source/Python/AutoGen/GenDepex.py
@@ -360,7 +360,7 @@ class DependencyExpression:
FilePath = ""
FileChangeFlag = True
- if File == None:
+ if File is None:
sys.stdout.write(Buffer.getvalue())
FilePath = "STDOUT"
else:
@@ -414,13 +414,13 @@ def Main():
EdkLogger.SetLevel(EdkLogger.QUIET)
elif Option.verbose:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
try:
- if Option.ModuleType == None or Option.ModuleType not in gType2Phase:
+ if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")
DxsFile = ''
@@ -437,7 +437,7 @@ def Main():
EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
- if Option.OutputFile != None:
+ if Option.OutputFile is not None:
FileChangeFlag = Dpx.Generate(Option.OutputFile)
if not FileChangeFlag and DxsFile:
#
@@ -450,7 +450,7 @@ def Main():
Dpx.Generate()
except BaseException, X:
EdkLogger.quiet("")
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet(traceback.format_exc())
else:
EdkLogger.quiet(str(X))
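
The Option.debug and Option.OutputFile checks above work because optparse leaves any option the user did not pass set to None, so "is not None" reads as "this flag was given on the command line". A small illustrative sketch (hypothetical option names, not GenDepex's real interface):

from optparse import OptionParser

parser = OptionParser()
parser.add_option("-d", "--debug", dest="debug", type="int")
parser.add_option("-o", "--output", dest="OutputFile")
Options, Args = parser.parse_args(["-d", "3"])

assert Options.debug is not None       # flag supplied, value parsed as int
assert Options.OutputFile is None      # flag omitted; optparse default is None
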
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index 60bd625cd2b1..dcdfcca1a5b0 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -906,12 +906,12 @@ cleanlib:
# skip non-C files
if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c":
continue
- elif DepSet == None:
+ elif DepSet is None:
DepSet = set(self.FileDependency[File])
else:
DepSet &= set(self.FileDependency[File])
# in case nothing in SourceFileList
- if DepSet == None:
+ if DepSet is None:
DepSet = set()
#
# Extract common files list in the dependency files
@@ -1516,7 +1516,7 @@ class TopLevelMakefile(BuildFile):
# TRICK: for not generating GenFds call in makefile if no FDF file
MacroList = []
- if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "":
+ if PlatformInfo.FdfFile is not None and PlatformInfo.FdfFile != "":
FdfFileList = [PlatformInfo.FdfFile]
# macros passed to GenFds
MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource.replace('\\', '\\\\')))
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
index e4d7f3b759a9..a2c4fb39ec85 100644
--- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py
+++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -1234,7 +1234,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
- if SkuId == None or SkuId == '':
+ if SkuId is None or SkuId == '':
continue
diff --git a/BaseTools/Source/Python/AutoGen/IdfClassObject.py b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
index d6d4703370aa..cb72219b40d5 100644
--- a/BaseTools/Source/Python/AutoGen/IdfClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
@@ -76,7 +76,7 @@ class IdfFileClassObject(object):
self.LoadIdfFile(File)
def LoadIdfFile(self, File = None):
- if File == None:
+ if File is None:
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
self.File = File
@@ -106,7 +106,7 @@ class IdfFileClassObject(object):
if Len == 4 and LineDetails[2] != 'TRANSPARENT':
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', LineDetails[1], re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(LineDetails[1]):
+ if MatchString is None or MatchString.end(0) != len(LineDetails[1]):
EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains the invalid character.' % (LineDetails[1], File.Path))
if LineDetails[1] not in self.ImageIDList:
self.ImageIDList.append(LineDetails[1])
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
index 9c7dd1e40374..73af1214eb0a 100644
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ b/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -150,7 +150,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
Name = StringItem.StringName
Token = StringItem.Token
Referenced = StringItem.Referenced
- if Name != None:
+ if Name is not None:
Line = ''
if Referenced == True:
if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
@@ -478,11 +478,11 @@ def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
# @retval FileList: A list of all files found
#
def GetFileList(SourceFileList, IncludeList, SkipList):
- if IncludeList == None:
+ if IncludeList is None:
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
FileList = []
- if SkipList == None:
+ if SkipList is None:
SkipList = []
for File in SourceFileList:
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index 856d19cda270..27644815dd38 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -123,7 +123,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if IsCompatibleMode:
if length == 3 and LangName.isalpha():
TempLangName = LangConvTable.get(LangName.lower())
- if TempLangName != None:
+ if TempLangName is not None:
return TempLangName
return LangName
else:
@@ -135,7 +135,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if LangName.isalpha():
return LangName
elif length == 3:
- if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:
+ if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
return LangName
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
@@ -143,7 +143,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
elif length >= 6:
if LangName[0:2].isalpha() and LangName[2] == '-':
return LangName
- if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':
+ if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
return LangName
EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
@@ -194,14 +194,14 @@ class StringDefClassObject(object):
self.UseOtherLangDef = UseOtherLangDef
self.Length = 0
- if Name != None:
+ if Name is not None:
self.StringName = Name
self.StringNameByteList = UniToHexList(Name)
- if Value != None:
+ if Value is not None:
self.StringValue = Value + u'\x00' # Add a NULL at string tail
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
- if Token != None:
+ if Token is not None:
self.Token = Token
def __str__(self):
@@ -212,7 +212,7 @@ class StringDefClassObject(object):
repr(self.UseOtherLangDef)
def UpdateValue(self, Value = None):
- if Value != None:
+ if Value is not None:
self.StringValue = Value + u'\x00' # Add a NULL at string tail
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
@@ -351,7 +351,7 @@ class UniFileClassObject(object):
# Check the string name
if Name != '':
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
+ if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
LanguageList = Item.split(u'#language ')
for IndexI in range(len(LanguageList)):
@@ -465,7 +465,7 @@ class UniFileClassObject(object):
# Load a .uni file
#
def LoadUniFile(self, File = None):
- if File == None:
+ if File is None:
EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
self.File = File
#
@@ -521,7 +521,7 @@ class UniFileClassObject(object):
# Check the string name
if not self.IsCompatibleMode and Name != '':
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
+ if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
self.AddStringToList(Name, Language, Value)
continue
@@ -577,7 +577,7 @@ class UniFileClassObject(object):
IsAdded = True
if Name in self.OrderedStringDict[Language]:
IsAdded = False
- if Value != None:
+ if Value is not None:
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
diff --git a/BaseTools/Source/Python/BPDG/BPDG.py b/BaseTools/Source/Python/BPDG/BPDG.py
index b1e328ff3f11..6c8f89f5d12b 100644
--- a/BaseTools/Source/Python/BPDG/BPDG.py
+++ b/BaseTools/Source/Python/BPDG/BPDG.py
@@ -57,21 +57,21 @@ def main():
EdkLogger.SetLevel(EdkLogger.VERBOSE)
elif Options.opt_quiet:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Options.debug_level != None:
+ elif Options.debug_level is not None:
EdkLogger.SetLevel(Options.debug_level + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if Options.bin_filename == None:
+ if Options.bin_filename is None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
- if Options.filename == None:
+ if Options.filename is None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
Force = False
- if Options.opt_force != None:
+ if Options.opt_force is not None:
Force = True
- if (Args[0] != None) :
+ if (Args[0] is not None) :
StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
else :
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
diff --git a/BaseTools/Source/Python/BPDG/GenVpd.py b/BaseTools/Source/Python/BPDG/GenVpd.py
index cdfc420c66f7..887240e94cb4 100644
--- a/BaseTools/Source/Python/BPDG/GenVpd.py
+++ b/BaseTools/Source/Python/BPDG/GenVpd.py
@@ -381,7 +381,7 @@ class GenVPD :
# Delete useless lines
while (True) :
try :
- if (self.FileLinesList[count] == None) :
+ if (self.FileLinesList[count] is None) :
del(self.FileLinesList[count])
else :
count += 1
@@ -398,7 +398,7 @@ class GenVPD :
# Process the pcds one by one base on the pcd's value and size
count = 0
for line in self.FileLinesList:
- if line != None :
+ if line is not None :
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName)
# Strip the space char
PCD.PcdCName = PCD.PcdCName.strip(' ')
diff --git a/BaseTools/Source/Python/Common/DecClassObject.py b/BaseTools/Source/Python/Common/DecClassObject.py
index d7c70a7336a0..835dbd5935d2 100644
--- a/BaseTools/Source/Python/Common/DecClassObject.py
+++ b/BaseTools/Source/Python/Common/DecClassObject.py
@@ -116,7 +116,7 @@ class Dec(DecObject):
#
# Load Dec file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadDecFile(Filename)
#
diff --git a/BaseTools/Source/Python/Common/Dictionary.py b/BaseTools/Source/Python/Common/Dictionary.py
index 1c33fefabf98..f653275ff13f 100644
--- a/BaseTools/Source/Python/Common/Dictionary.py
+++ b/BaseTools/Source/Python/Common/Dictionary.py
@@ -54,7 +54,7 @@ def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplit
# @param Dict: The dictionary to be printed
#
def printDict(Dict):
- if Dict != None:
+ if Dict is not None:
KeyList = Dict.keys()
for Key in KeyList:
if Dict[Key] != '':
diff --git a/BaseTools/Source/Python/Common/DscClassObject.py b/BaseTools/Source/Python/Common/DscClassObject.py
index c2fa1c275a2d..b98dbf57229b 100644
--- a/BaseTools/Source/Python/Common/DscClassObject.py
+++ b/BaseTools/Source/Python/Common/DscClassObject.py
@@ -128,7 +128,7 @@ class Dsc(DscObject):
#
# Load Dsc file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadDscFile(Filename)
#
@@ -902,7 +902,7 @@ class Dsc(DscObject):
#
def GenSkuInfoList(self, SkuNameList, SkuInfo, VariableName='', VariableGuid='', VariableOffset='', HiiDefaultValue='', VpdOffset='', DefaultValue=''):
SkuNameList = GetSplitValueList(SkuNameList)
- if SkuNameList == None or SkuNameList == [] or SkuNameList == ['']:
+ if SkuNameList is None or SkuNameList == [] or SkuNameList == ['']:
SkuNameList = ['DEFAULT']
SkuInfoList = {}
for Item in SkuNameList:
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspace.py b/BaseTools/Source/Python/Common/EdkIIWorkspace.py
index f22a545b77ce..c14b4eb52d50 100644
--- a/BaseTools/Source/Python/Common/EdkIIWorkspace.py
+++ b/BaseTools/Source/Python/Common/EdkIIWorkspace.py
@@ -38,7 +38,7 @@ class EdkIIWorkspace:
#
# Check environment variable 'WORKSPACE'
#
- if os.environ.get('WORKSPACE') == None:
+ if os.environ.get('WORKSPACE') is None:
print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'
return False
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
index d6df01d4ce06..c0966d526519 100644
--- a/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
+++ b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
@@ -93,7 +93,7 @@ class PcdClassObject(object):
# @retval True The two pcds are the same
#
def __eq__(self, Other):
- return Other != None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
+ return Other is not None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
## Override __hash__ function
#
@@ -121,7 +121,7 @@ class LibraryClassObject(object):
def __init__(self, Name = None, SupModList = [], Type = None):
self.LibraryClass = Name
self.SupModList = SupModList
- if Type != None:
+ if Type is not None:
self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
## ModuleBuildClassObject
@@ -864,7 +864,7 @@ class WorkspaceBuild(object):
for Libs in Pb.LibraryClass:
for Type in Libs.SupModList:
Instance = self.FindLibraryClassInstanceOfLibrary(Lib, Arch, Type)
- if Instance == None:
+ if Instance is None:
Instance = RecommendedInstance
Pb.LibraryClasses[(Lib, Type)] = Instance
else:
@@ -872,7 +872,7 @@ class WorkspaceBuild(object):
# For Module
#
Instance = self.FindLibraryClassInstanceOfModule(Lib, Arch, Pb.ModuleType, Inf)
- if Instance == None:
+ if Instance is None:
Instance = RecommendedInstance
Pb.LibraryClasses[(Lib, Pb.ModuleType)] = Instance
@@ -912,7 +912,7 @@ class WorkspaceBuild(object):
if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList):
continue
Module = self.Build[Arch].ModuleDatabase[Inf]
- if Module.LibraryClass == None or Module.LibraryClass == []:
+ if Module.LibraryClass is None or Module.LibraryClass == []:
self.UpdateLibrariesOfModule(Platform, Module, Arch)
for Key in Module.LibraryClasses:
Lib = Module.LibraryClasses[Key]
@@ -969,15 +969,15 @@ class WorkspaceBuild(object):
continue
LibraryClassName = Key[0]
- if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] == None:
- if LibraryPath == None or LibraryPath == "":
+ if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] is None:
+ if LibraryPath is None or LibraryPath == "":
LibraryInstance[LibraryClassName] = None
continue
LibraryModule = ModuleDatabase[LibraryPath]
LibraryInstance[LibraryClassName] = LibraryModule
LibraryConsumerList.append(LibraryModule)
EdkLogger.verbose("\t" + LibraryClassName + " : " + str(LibraryModule))
- elif LibraryPath == None or LibraryPath == "":
+ elif LibraryPath is None or LibraryPath == "":
continue
else:
LibraryModule = LibraryInstance[LibraryClassName]
@@ -1002,7 +1002,7 @@ class WorkspaceBuild(object):
Q = []
for LibraryClassName in LibraryInstance:
M = LibraryInstance[LibraryClassName]
- if M == None:
+ if M is None:
EdkLogger.error("AutoGen", AUTOGEN_ERROR,
"Library instance for library class [%s] is not found" % LibraryClassName,
ExtraData="\t%s [%s]" % (str(Module), Arch))
@@ -1011,7 +1011,7 @@ class WorkspaceBuild(object):
# check if there're duplicate library classes
#
for Lc in M.LibraryClass:
- if Lc.SupModList != None and ModuleType not in Lc.SupModList:
+ if Lc.SupModList is not None and ModuleType not in Lc.SupModList:
EdkLogger.error("AutoGen", AUTOGEN_ERROR,
"Module type [%s] is not supported by library instance [%s]" % (ModuleType, str(M)),
ExtraData="\t%s" % str(Module))
@@ -1380,7 +1380,7 @@ class WorkspaceBuild(object):
if (Name, Guid) in Pcds:
OwnerPlatform = Dsc
Pcd = Pcds[(Name, Guid)]
- if Pcd.Type != '' and Pcd.Type != None:
+ if Pcd.Type != '' and Pcd.Type is not None:
NewType = Pcd.Type
if NewType in DataType.PCD_DYNAMIC_TYPE_LIST:
NewType = DataType.TAB_PCDS_DYNAMIC
@@ -1396,13 +1396,13 @@ class WorkspaceBuild(object):
EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)
- if Pcd.DatumType != '' and Pcd.DatumType != None:
+ if Pcd.DatumType != '' and Pcd.DatumType is not None:
DatumType = Pcd.DatumType
- if Pcd.TokenValue != '' and Pcd.TokenValue != None:
+ if Pcd.TokenValue != '' and Pcd.TokenValue is not None:
Token = Pcd.TokenValue
- if Pcd.DefaultValue != '' and Pcd.DefaultValue != None:
+ if Pcd.DefaultValue != '' and Pcd.DefaultValue is not None:
Value = Pcd.DefaultValue
- if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize != None:
+ if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize is not None:
MaxDatumSize = Pcd.MaxDatumSize
SkuInfoList = Pcd.SkuInfoList
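
The __eq__ hunk in PcdClassObject above is the clearest case for the identity form: the guard has to hold before any attribute of Other is touched, and writing it as "is not None" keeps it independent of whatever comparison semantics either operand defines. A hypothetical, stripped-down model of that method (names invented for illustration):

class MiniPcd(object):
    def __init__(self, cname, guid):
        self.TokenCName = cname
        self.TokenSpaceGuidCName = guid

    def __eq__(self, Other):
        # identity check first, so "pcd == None" is simply False
        # instead of raising AttributeError on None.TokenCName
        return Other is not None and \
               self.TokenCName == Other.TokenCName and \
               self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName

a = MiniPcd("PcdSample", "gSampleTokenSpaceGuid")
b = MiniPcd("PcdSample", "gSampleTokenSpaceGuid")
assert a == b
assert not (a == None)                 # guard short-circuits safely
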
diff --git a/BaseTools/Source/Python/Common/EdkLogger.py b/BaseTools/Source/Python/Common/EdkLogger.py
index ac1c8edc4fe2..3f462df49ada 100644
--- a/BaseTools/Source/Python/Common/EdkLogger.py
+++ b/BaseTools/Source/Python/Common/EdkLogger.py
@@ -89,7 +89,7 @@ def debug(Level, Message, ExtraData=None):
"msg" : Message,
}
- if ExtraData != None:
+ if ExtraData is not None:
LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData
else:
LogText = _DebugMessageTemplate % TemplateDict
@@ -119,10 +119,10 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
return
# if no tool name given, use caller's source file name as tool name
- if ToolName == None or ToolName == "":
+ if ToolName is None or ToolName == "":
ToolName = os.path.basename(traceback.extract_stack()[-2][0])
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
@@ -134,12 +134,12 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
"msg" : Message,
}
- if File != None:
+ if File is not None:
LogText = _WarningMessageTemplate % TemplateDict
else:
LogText = _WarningMessageTemplateWithoutFile % TemplateDict
- if ExtraData != None:
+ if ExtraData is not None:
LogText += "\n %s" % ExtraData
_InfoLogger.log(WARN, LogText)
@@ -168,18 +168,18 @@ info = _InfoLogger.info
# it's True. This is the default behavior.
#
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
- if Message == None:
+ if Message is None:
if ErrorCode in gErrorMessage:
Message = gErrorMessage[ErrorCode]
else:
Message = gErrorMessage[UNKNOWN_ERROR]
- if ExtraData == None:
+ if ExtraData is None:
ExtraData = ""
TemplateDict = {
@@ -191,7 +191,7 @@ def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=Non
"extra" : ExtraData
}
- if File != None:
+ if File is not None:
LogText = _ErrorMessageTemplate % TemplateDict
else:
LogText = _ErrorMessageTemplateWithoutFile % TemplateDict
diff --git a/BaseTools/Source/Python/Common/FdfClassObject.py b/BaseTools/Source/Python/Common/FdfClassObject.py
index 3e7d44954c88..3d37800d9ab7 100644
--- a/BaseTools/Source/Python/Common/FdfClassObject.py
+++ b/BaseTools/Source/Python/Common/FdfClassObject.py
@@ -51,7 +51,7 @@ class Fdf(FdfObject):
#
# Load Fdf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadFdfFile(Filename)
#
diff --git a/BaseTools/Source/Python/Common/FdfParserLite.py b/BaseTools/Source/Python/Common/FdfParserLite.py
index df287414db6f..496241a7b217 100644
--- a/BaseTools/Source/Python/Common/FdfParserLite.py
+++ b/BaseTools/Source/Python/Common/FdfParserLite.py
@@ -353,7 +353,7 @@ class FdfParser(object):
if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:
Value = Profile.MacroValue
- if Value != None:
+ if Value is not None:
Str = Str.replace('$(' + Name + ')', Value)
MacroEnd = MacroStart + len(Value)
@@ -676,8 +676,8 @@ class FdfParser(object):
FileLineTuple = GetRealFileLine(self.FileName, Line)
if Name in InputMacroDict:
MacroValue = InputMacroDict[Name]
- if Op == None:
- if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':
+ if Op is None:
+ if Value == 'Bool' and MacroValue is None or MacroValue.upper() == 'FALSE':
return False
return True
elif Op == '!=':
@@ -691,7 +691,7 @@ class FdfParser(object):
else:
return False
else:
- if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue is not None and MacroValue.isdigit())):
InputVal = long(Value, 0)
MacroVal = long(MacroValue, 0)
if Op == '>':
@@ -721,8 +721,8 @@ class FdfParser(object):
for Profile in AllMacroList:
if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
- if Op == None:
- if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
+ if Op is None:
+ if Value == 'Bool' and Profile.MacroValue is None or Profile.MacroValue.upper() == 'FALSE':
return False
return True
elif Op == '!=':
@@ -736,7 +736,7 @@ class FdfParser(object):
else:
return False
else:
- if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue is not None and Profile.MacroValue.isdigit())):
InputVal = long(Value, 0)
MacroVal = long(Profile.MacroValue, 0)
if Op == '>':
@@ -932,7 +932,7 @@ class FdfParser(object):
if not self.__GetNextToken():
return False
- if RangeExpression.RegGuidPattern.match(self.__Token) != None:
+ if RangeExpression.RegGuidPattern.match(self.__Token) is not None:
return True
else:
self.__UndoToken()
@@ -1451,7 +1451,7 @@ class FdfParser(object):
pass
for Item in Obj.BlockSizeList:
- if Item[0] == None or Item[1] == None:
+ if Item[0] is None or Item[1] is None:
raise Warning("expected block statement for Fd Section", self.FileName, self.CurrentLineNumber)
return True
@@ -2420,7 +2420,7 @@ class FdfParser(object):
FvImageSectionObj = CommonDataClass.FdfClass.FvImageSectionClassObject()
FvImageSectionObj.Alignment = AlignValue
- if FvObj != None:
+ if FvObj is not None:
FvImageSectionObj.Fv = FvObj
FvImageSectionObj.FvName = None
else:
@@ -2940,7 +2940,7 @@ class FdfParser(object):
Rule.CheckSum = CheckSum
Rule.Fixed = Fixed
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
while True:
@@ -2967,7 +2967,7 @@ class FdfParser(object):
Rule.Fixed = Fixed
Rule.FileExtension = Ext
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
return Rule
@@ -3010,7 +3010,7 @@ class FdfParser(object):
Rule.Fixed = Fixed
Rule.FileName = self.__Token
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
return Rule
@@ -3147,7 +3147,7 @@ class FdfParser(object):
EfiSectionObj.KeepReloc = False
else:
EfiSectionObj.KeepReloc = True
- if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
raise Warning("Section type %s has reloc strip flag conflict with Rule At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
else:
raise Warning("Section type %s could not have reloc strip flag At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
@@ -3469,7 +3469,7 @@ class FdfParser(object):
raise Warning("expected Component version At Line ", self.FileName, self.CurrentLineNumber)
Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')
- if Pattern.match(self.__Token) == None:
+ if Pattern.match(self.__Token) is None:
raise Warning("Unknown version format At line ", self.FileName, self.CurrentLineNumber)
CompStatementObj.CompVer = self.__Token
@@ -3542,7 +3542,7 @@ class FdfParser(object):
for elementRegion in FdObj.RegionList:
if elementRegion.RegionType == 'FV':
for elementRegionData in elementRegion.RegionDataList:
- if elementRegionData != None and elementRegionData.upper() not in FvList:
+ if elementRegionData is not None and elementRegionData.upper() not in FvList:
FvList.append(elementRegionData.upper())
return FvList
@@ -3559,9 +3559,9 @@ class FdfParser(object):
for FfsObj in FvObj.FfsList:
if isinstance(FfsObj, FfsFileStatement.FileStatement):
- if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
RefFvList.append(FfsObj.FvName.upper())
- elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
RefFdList.append(FfsObj.FdName.upper())
else:
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
@@ -3582,9 +3582,9 @@ class FdfParser(object):
while SectionStack != []:
SectionObj = SectionStack.pop()
if isinstance(SectionObj, FvImageSection.FvImageSection):
- if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
FvList.append(SectionObj.FvName.upper())
- if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
FvList.append(SectionObj.Fv.UiFvName.upper())
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
diff --git a/BaseTools/Source/Python/Common/InfClassObject.py b/BaseTools/Source/Python/Common/InfClassObject.py
index f24e4e41a0c1..ba43eb548471 100644
--- a/BaseTools/Source/Python/Common/InfClassObject.py
+++ b/BaseTools/Source/Python/Common/InfClassObject.py
@@ -199,7 +199,7 @@ class Inf(InfObject):
#
# Load Inf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadInfFile(Filename)
#
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index 7d44fdcf8ba7..1e6e61026474 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -85,7 +85,7 @@ def _parseForXcode(lines, efifilepath, varnames):
for varname in varnames:
if varname in line:
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line)
- if m != None:
+ if m is not None:
ret.append((varname, m.group(1)))
return ret
@@ -110,27 +110,27 @@ def _parseForGCC(lines, efifilepath, varnames):
# status handler
if status == 3:
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)
- if m != None:
+ if m is not None:
sections.append(m.groups(0))
for varname in varnames:
Str = ''
m = re.match("^.data.(%s)" % varname, line)
- if m != None:
+ if m is not None:
m = re.match(".data.(%s)$" % varname, line)
- if m != None:
+ if m is not None:
Str = lines[index + 1]
else:
Str = line[len(".data.%s" % varname):]
if Str:
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip())
- if m != None:
+ if m is not None:
varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))
if not varoffset:
return []
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return []
#redirection
redirection = 0
@@ -166,19 +166,19 @@ def _parseGeneral(lines, efifilepath, varnames):
continue
if status == 1 and len(line) != 0:
m = secRe.match(line)
- assert m != None, "Fail to parse the section in map file , line is %s" % line
+ assert m is not None, "Fail to parse the section in map file , line is %s" % line
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
if status == 2 and len(line) != 0:
for varname in varnames:
m = symRe.match(line)
- assert m != None, "Fail to parse the symbol in map file, line is %s" % line
+ assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
sec_no = int(sec_no, 16)
sym_offset = int(sym_offset, 16)
vir_addr = int(vir_addr, 16)
m2 = re.match('^[_]*(%s)' % varname, sym_name)
- if m2 != None:
+ if m2 is not None:
# found a binary pcd entry in map file
for sec in secs:
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
@@ -188,7 +188,7 @@ def _parseGeneral(lines, efifilepath, varnames):
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return []
ret = []
@@ -423,7 +423,7 @@ def GuidStructureStringToGuidValueName(GuidValue):
# @param Directory The directory name
#
def CreateDirectory(Directory):
- if Directory == None or Directory.strip() == "":
+ if Directory is None or Directory.strip() == "":
return True
try:
if not os.access(Directory, os.F_OK):
@@ -437,7 +437,7 @@ def CreateDirectory(Directory):
# @param Directory The directory name
#
def RemoveDirectory(Directory, Recursively=False):
- if Directory == None or Directory.strip() == "" or not os.path.exists(Directory):
+ if Directory is None or Directory.strip() == "" or not os.path.exists(Directory):
return
if Recursively:
CurrentDirectory = os.getcwd()
@@ -540,7 +540,7 @@ def DataDump(Data, File):
except:
EdkLogger.error("", FILE_OPEN_FAILURE, ExtraData=File, RaiseError=False)
finally:
- if Fd != None:
+ if Fd is not None:
Fd.close()
## Restore a Python object from a file
@@ -560,7 +560,7 @@ def DataRestore(File):
EdkLogger.verbose("Failed to load [%s]\n\t%s" % (File, str(e)))
Data = None
finally:
- if Fd != None:
+ if Fd is not None:
Fd.close()
return Data
@@ -668,7 +668,7 @@ def GetFiles(Root, SkipList=None, FullPath=True):
# @retval False if file doesn't exists
#
def ValidFile(File, Ext=None):
- if Ext != None:
+ if Ext is not None:
Dummy, FileExt = os.path.splitext(File)
if FileExt.lower() != Ext.lower():
return False
@@ -715,13 +715,13 @@ def RealPath2(File, Dir='', OverrideDir=''):
#
def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
NewFile = File
- if Ext != None:
+ if Ext is not None:
Dummy, FileExt = os.path.splitext(File)
if FileExt.lower() != Ext.lower():
return False, File
# Replace the Edk macros
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
if OverrideDir.find('$(EFI_SOURCE)') > -1:
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
if OverrideDir.find('$(EDK_SOURCE)') > -1:
@@ -737,19 +737,19 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
NewFile = File.replace('$(EFI_SOURCE)', EfiSource)
NewFile = NewFile.replace('$(EDK_SOURCE)', EdkSource)
NewFile = AllFiles[os.path.normpath(NewFile)]
- if NewFile != None:
+ if NewFile is not None:
return True, NewFile
# Second check the path with override value
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
- if NewFile != None:
+ if NewFile is not None:
return True, NewFile
# Last check the path with normal definitions
File = os.path.join(Dir, File)
NewFile = AllFiles[os.path.normpath(File)]
- if NewFile != None:
+ if NewFile is not None:
return True, NewFile
return False, File
@@ -759,7 +759,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
#
def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
# Replace the Edk macros
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
if OverrideDir.find('$(EFI_SOURCE)') > -1:
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
if OverrideDir.find('$(EDK_SOURCE)') > -1:
@@ -781,23 +781,23 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.'
File = File.replace('$(EFI_SOURCE)', EfiSource)
File = File.replace('$(EDK_SOURCE)', EdkSource)
NewFile = AllFiles[os.path.normpath(File)]
- if NewFile != None:
+ if NewFile is not None:
NewRelaPath = os.path.dirname(NewFile)
File = os.path.basename(NewFile)
#NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
break
# Second check the path with override value
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
- if NewFile != None:
+ if NewFile is not None:
#NewRelaPath = os.path.dirname(NewFile)
NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
break
# Last check the path with normal definitions
NewFile = AllFiles[os.path.normpath(os.path.join(Dir, File))]
- if NewFile != None:
+ if NewFile is not None:
break
# No file found
@@ -1062,7 +1062,7 @@ class Progressor:
self.CodaMessage = CloseMessage
self.ProgressChar = ProgressChar
self.Interval = Interval
- if Progressor._StopFlag == None:
+ if Progressor._StopFlag is None:
Progressor._StopFlag = threading.Event()
## Start to print progress character
@@ -1070,10 +1070,10 @@ class Progressor:
# @param OpenMessage The string printed before progress characters
#
def Start(self, OpenMessage=None):
- if OpenMessage != None:
+ if OpenMessage is not None:
self.PromptMessage = OpenMessage
Progressor._StopFlag.clear()
- if Progressor._ProgressThread == None:
+ if Progressor._ProgressThread is None:
Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)
Progressor._ProgressThread.setDaemon(False)
Progressor._ProgressThread.start()
@@ -1084,7 +1084,7 @@ class Progressor:
#
def Stop(self, CloseMessage=None):
OriginalCodaMessage = self.CodaMessage
- if CloseMessage != None:
+ if CloseMessage is not None:
self.CodaMessage = CloseMessage
self.Abort()
self.CodaMessage = OriginalCodaMessage
@@ -1107,9 +1107,9 @@ class Progressor:
## Abort the progress display
@staticmethod
def Abort():
- if Progressor._StopFlag != None:
+ if Progressor._StopFlag is not None:
Progressor._StopFlag.set()
- if Progressor._ProgressThread != None:
+ if Progressor._ProgressThread is not None:
Progressor._ProgressThread.join()
Progressor._ProgressThread = None
@@ -1228,7 +1228,7 @@ class sdict(IterableUserDict):
return key, value
def update(self, dict=None, **kwargs):
- if dict != None:
+ if dict is not None:
for k, v in dict.items():
self[k] = v
if len(kwargs):
@@ -1301,7 +1301,7 @@ class tdict:
if self._Level_ > 1:
RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
- if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
+ if FirstKey is None or str(FirstKey).upper() in self._ValidWildcardList:
FirstKey = self._Wildcard
if self._Single_:
@@ -1316,24 +1316,24 @@ class tdict:
if FirstKey == self._Wildcard:
if FirstKey in self.data:
Value = self.data[FirstKey][RestKeys]
- if Value == None:
+ if Value is None:
for Key in self.data:
Value = self.data[Key][RestKeys]
- if Value != None: break
+ if Value is not None: break
else:
if FirstKey in self.data:
Value = self.data[FirstKey][RestKeys]
- if Value == None and self._Wildcard in self.data:
+ if Value is None and self._Wildcard in self.data:
#print "Value=None"
Value = self.data[self._Wildcard][RestKeys]
else:
if FirstKey == self._Wildcard:
if FirstKey in self.data:
Value = self.data[FirstKey]
- if Value == None:
+ if Value is None:
for Key in self.data:
Value = self.data[Key]
- if Value != None: break
+ if Value is not None: break
else:
if FirstKey in self.data:
Value = self.data[FirstKey]
@@ -2066,7 +2066,7 @@ class PathClass(object):
return hash(self.Path)
def _GetFileKey(self):
- if self._Key == None:
+ if self._Key is None:
self._Key = self.Path.upper() # + self.ToolChainFamily + self.TagName + self.ToolCode + self.Target
return self._Key
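
Most of the Misc.py changes guard re.match results; the function returns either a match object or None, so the identity test is the natural way to tell "no match" apart from a match whose groups still need extracting. Illustrative only, with made-up input:

import re

m = re.match(r'^([\da-fA-Fx]+) +([\da-fA-Fx]+)', '0x1000 0x20')
if m is not None:                      # a match object was returned
    assert (m.group(1), m.group(2)) == ('0x1000', '0x20')

assert re.match(r'^[\da-fA-Fx]+$', 'FV_MAIN') is None   # no match -> None
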
diff --git a/BaseTools/Source/Python/Common/Parsing.py b/BaseTools/Source/Python/Common/Parsing.py
index 584fc7f3c3a0..d199d1e40d8e 100644
--- a/BaseTools/Source/Python/Common/Parsing.py
+++ b/BaseTools/Source/Python/Common/Parsing.py
@@ -299,7 +299,7 @@ def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
Format = '<TokenSpaceGuidCName>.<PcdCName>'
- if TokenInfoString != '' and TokenInfoString != None:
+ if TokenInfoString != '' and TokenInfoString is not None:
TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)
if len(TokenInfoList) == 2:
return True
@@ -550,7 +550,7 @@ def GetComponents(Lines, Key, KeyValues, CommentCharacter):
LineList = Lines.split('\n')
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
- if Line == None or Line == '':
+ if Line is None or Line == '':
continue
if findBlock == False:
diff --git a/BaseTools/Source/Python/Common/String.py b/BaseTools/Source/Python/Common/String.py
index 696be4c1f0b2..5dc5b85dc5a4 100644
--- a/BaseTools/Source/Python/Common/String.py
+++ b/BaseTools/Source/Python/Common/String.py
@@ -634,7 +634,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
# @retval True The file type is correct
#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
(Root, Ext) = os.path.splitext(CheckFilename)
if Ext.upper() != ExtName.upper():
ContainerFile = open(ContainerFilename, 'r').read()
@@ -662,7 +662,7 @@ def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line,
#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
CheckFile = ''
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
if not os.path.isfile(CheckFile):
ContainerFile = open(ContainerFilename, 'r').read()
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
index 387e51523097..0ba7725dd5b5 100644
--- a/BaseTools/Source/Python/Common/TargetTxtClassObject.py
+++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
@@ -45,7 +45,7 @@ class TargetTxtClassObject(object):
DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
}
self.ConfDirectoryPath = ""
- if Filename != None:
+ if Filename is not None:
self.LoadTargetTxtFile(Filename)
## LoadTargetTxtFile
@@ -83,7 +83,7 @@ class TargetTxtClassObject(object):
self.ConfDirectoryPath = os.path.dirname(FileName)
except:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
- if F != None:
+ if F is not None:
F.close()
for Line in F:
@@ -144,7 +144,7 @@ class TargetTxtClassObject(object):
# @param Dict: The dictionary to be printed
#
def printDict(Dict):
- if Dict != None:
+ if Dict is not None:
KeyList = Dict.keys()
for Key in KeyList:
if Dict[Key] != '':
diff --git a/BaseTools/Source/Python/Common/ToolDefClassObject.py b/BaseTools/Source/Python/Common/ToolDefClassObject.py
index dc90b4783f2f..1ab848f1ec68 100644
--- a/BaseTools/Source/Python/Common/ToolDefClassObject.py
+++ b/BaseTools/Source/Python/Common/ToolDefClassObject.py
@@ -53,7 +53,7 @@ class ToolDefClassObject(object):
for Env in os.environ:
self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]
- if FileName != None:
+ if FileName is not None:
self.LoadToolDefFile(FileName)
## LoadToolDefFile
diff --git a/BaseTools/Source/Python/Common/VpdInfoFile.py b/BaseTools/Source/Python/Common/VpdInfoFile.py
index 716155e96d29..b1baf06b9ccd 100644
--- a/BaseTools/Source/Python/Common/VpdInfoFile.py
+++ b/BaseTools/Source/Python/Common/VpdInfoFile.py
@@ -89,7 +89,7 @@ class VpdInfoFile:
# @param offset integer value for VPD's offset in specific SKU.
#
def Add(self, Vpd, skuname,Offset):
- if (Vpd == None):
+ if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= 0 or Offset == "*"):
@@ -100,7 +100,7 @@ class VpdInfoFile:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
- if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
+ if Vpd.MaxDatumSize is None or Vpd.MaxDatumSize == "":
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
else:
if Vpd.MaxDatumSize <= 0:
@@ -122,7 +122,7 @@ class VpdInfoFile:
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
- if not (FilePath != None or len(FilePath) != 0):
+ if not (FilePath is not None or len(FilePath) != 0):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
@@ -227,8 +227,8 @@ class VpdInfoFile:
# @param VpdFileName The string path name for VPD information guid.txt
#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
- assert ToolPath != None, "Invalid parameter ToolPath"
- assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
+ assert ToolPath is not None, "Invalid parameter ToolPath"
+ assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
OutputDir = os.path.dirname(VpdFileName)
FileName = os.path.basename(VpdFileName)
@@ -250,7 +250,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
(out, error) = PopenObject.communicate()
print out
- while PopenObject.returncode == None :
+ while PopenObject.returncode is None :
PopenObject.wait()
if PopenObject.returncode != 0:
diff --git a/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
index e6c4495c95e7..6a8262e5e964 100644
--- a/BaseTools/Source/Python/CommonDataClass/CommonClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
@@ -44,11 +44,11 @@ def GenerateHelpText(Text, Lang):
class CommonClass(object):
def __init__(self, Usage = None, FeatureFlag = '', SupArchList = None, HelpText = ''):
self.Usage = Usage
- if self.Usage == None:
+ if self.Usage is None:
self.Usage = []
self.FeatureFlag = FeatureFlag
self.SupArchList = SupArchList
- if self.SupArchList == None:
+ if self.SupArchList is None:
self.SupArchList = []
self.HelpText = HelpText
self.HelpTextList = []
@@ -375,13 +375,13 @@ class PcdClass(CommonClass):
self.PcdCName = ''
self.Value = ''
self.Offset = ''
- if self.ValidUsage == None:
+ if self.ValidUsage is None:
self.ValidUsage = []
self.SkuInfoList = SkuInfoList
- if self.SkuInfoList == None:
+ if self.SkuInfoList is None:
self.SkuInfoList = {}
self.SupModuleList = SupModuleList
- if self.SupModuleList == None:
+ if self.SupModuleList is None:
self.SupModuleList = []
CommonClass.__init__(self)
self.PcdErrors = []
diff --git a/BaseTools/Source/Python/Ecc/CParser.py b/BaseTools/Source/Python/Ecc/CParser.py
index 41f2811430a0..94711a9a378a 100644
--- a/BaseTools/Source/Python/Ecc/CParser.py
+++ b/BaseTools/Source/Python/Ecc/CParser.py
@@ -783,14 +783,14 @@ class CParser(Parser):
if self.backtracking == 0:
- if d != None:
+ if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
else:
self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
self.function_definition_stack[-1].DeclLine = declarator1.start.line
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
- if a != None:
+ if a is not None:
self.function_definition_stack[-1].LBLine = a.start.line
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
else:
@@ -920,7 +920,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if b != None:
+ if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
@@ -957,7 +957,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if t != None:
+ if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
@@ -1401,7 +1401,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if s.stop != None:
+ if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
@@ -1416,7 +1416,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if e.stop != None:
+ if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py
index 5864758950ce..27783e617b92 100644
--- a/BaseTools/Source/Python/Ecc/Check.py
+++ b/BaseTools/Source/Python/Ecc/Check.py
@@ -1299,7 +1299,7 @@ class Check(object):
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
Name = Record[1].strip()
- if Name != '' and Name != None:
+ if Name != '' and Name is not None:
if Name[0] == '(':
Name = Name[1:Name.find(')')]
if Name.find('(') > -1:
diff --git a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
index 171600feebf9..fbe0c41b38b7 100644
--- a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
+++ b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
@@ -301,7 +301,7 @@ class CodeFragmentCollector:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
if self.__CurrentChar() == T_CHAR_LF:
- if HashComment and PPDirectiveObj != None:
+ if HashComment and PPDirectiveObj is not None:
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
PPDirectiveObj.Content += T_CHAR_LF
PPExtend = True
@@ -423,7 +423,7 @@ class CodeFragmentCollector:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
if self.__CurrentChar() == T_CHAR_LF:
- if HashComment and PPDirectiveObj != None:
+ if HashComment and PPDirectiveObj is not None:
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
PPDirectiveObj.Content += T_CHAR_LF
PPExtend = True
diff --git a/BaseTools/Source/Python/Ecc/Ecc.py b/BaseTools/Source/Python/Ecc/Ecc.py
index 94f9a427e370..60dfc00260f1 100644
--- a/BaseTools/Source/Python/Ecc/Ecc.py
+++ b/BaseTools/Source/Python/Ecc/Ecc.py
@@ -178,7 +178,7 @@ class Ecc(object):
self.BuildMetaDataFileDatabase(SpeciDirs)
if self.ScanSourceCode:
EdkLogger.quiet("Building database for Meta Data File Done!")
- if SpeciDirs == None:
+ if SpeciDirs is None:
c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)
else:
for specificDir in SpeciDirs:
@@ -195,7 +195,7 @@ class Ecc(object):
#
def BuildMetaDataFileDatabase(self, SpecificDirs = None):
ScanFolders = []
- if SpecificDirs == None:
+ if SpecificDirs is None:
ScanFolders.append(EccGlobalData.gTarget)
else:
for specificDir in SpecificDirs:
@@ -346,15 +346,15 @@ class Ecc(object):
self.SetLogLevel(Options)
# Set other options
- if Options.ConfigFile != None:
+ if Options.ConfigFile is not None:
self.ConfigFile = Options.ConfigFile
- if Options.OutputFile != None:
+ if Options.OutputFile is not None:
self.OutputFile = Options.OutputFile
- if Options.ReportFile != None:
+ if Options.ReportFile is not None:
self.ReportFile = Options.ReportFile
- if Options.ExceptionFile != None:
+ if Options.ExceptionFile is not None:
self.ExceptionFile = Options.ExceptionFile
- if Options.Target != None:
+ if Options.Target is not None:
if not os.path.isdir(Options.Target):
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
else:
@@ -362,15 +362,15 @@ class Ecc(object):
else:
EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")
EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))
- if Options.keepdatabase != None:
+ if Options.keepdatabase is not None:
self.IsInit = False
- if Options.metadata != None and Options.sourcecode != None:
+ if Options.metadata is not None and Options.sourcecode is not None:
EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")
- if Options.metadata != None:
+ if Options.metadata is not None:
self.ScanSourceCode = False
- if Options.sourcecode != None:
+ if Options.sourcecode is not None:
self.ScanMetaData = False
- if Options.folders != None:
+ if Options.folders is not None:
self.OnlyScan = True
## SetLogLevel
@@ -380,11 +380,11 @@ class Ecc(object):
# @param Option: The option list including log level setting
#
def SetLogLevel(self, Option):
- if Option.verbose != None:
+ if Option.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.quiet != None:
+ elif Option.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
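
A standalone sketch, not part of the patch, of why the option checks above compare against None at all: assuming an optparse-style parser (as these tools use in this era), a flag the user never passes stays None, so "is None" / "is not None" is the precise presence test. The option names below are illustrative only.

    from optparse import OptionParser

    Parser = OptionParser()
    Parser.add_option("-v", "--verbose", action="store_true", dest="verbose")
    Parser.add_option("-q", "--quiet", action="store_true", dest="quiet")
    (Opt, Args) = Parser.parse_args([])  # simulate: no flags on the command line
    print(Opt.verbose is None)           # True - unset store_true options default to None
    print(Opt.quiet is None)             # True

Because the sentinel is None rather than False, "is not None" distinguishes "flag not given" from any real value the option later carries.
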
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
index a27e98c9752f..6b980150f53e 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
@@ -116,7 +116,7 @@ class Table(object):
SqlCommand = """select max(ID) from %s""" % self.Table
Record = self.Cur.execute(SqlCommand).fetchall()
Id = Record[0][0]
- if Id == None:
+ if Id is None:
Id = self.IdBase
return Id
@@ -191,7 +191,7 @@ class TableDataModel(Table):
def InitTable(self):
EdkLogger.verbose("\nInitialize table DataModel started ...")
Count = self.GetCount()
- if Count != None and Count != 0:
+ if Count is not None and Count != 0:
return
for Item in DataClass.MODEL_LIST:
CrossIndex = Item[1]
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
index ba478f9ecf10..34ab586084f1 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
@@ -228,7 +228,7 @@ class MetaFileParser(object):
self.Start()
# No specific ARCH or Platform given, use raw data
- if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
return self._RawTable.Query(*DataInfo)
# Do post-process if necessary
@@ -564,7 +564,7 @@ class InfParser(MetaFileParser):
self._ValueList = ['','','']
# parse current line, result will be put in self._ValueList
self._SectionParser[self._SectionType](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
continue
#
@@ -877,7 +877,7 @@ class DscParser(MetaFileParser):
self._ValueList = ['', '', '']
self._SectionParser[SectionType](self)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
#
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
@@ -1197,7 +1197,7 @@ class DscParser(MetaFileParser):
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex+1)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
NewOwner = self._IdMapping.get(Owner, -1)
@@ -1573,7 +1573,7 @@ class DecParser(MetaFileParser):
# section content
self._ValueList = ['','','']
self._SectionParser[self._SectionType[0]](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
self._Comments = []
continue
@@ -1932,7 +1932,7 @@ class Fdf(FdfObject):
#
# Load Fdf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
try:
self.LoadFdfFile(Filename)
except Exception:
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
index 54a3016948b1..9faa6b58b001 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
@@ -117,9 +117,9 @@ class ModuleTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
- if Platform != None and Platform != 'COMMON':
+ if Platform is not None and Platform != 'COMMON':
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -198,7 +198,7 @@ class PackageTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -283,17 +283,17 @@ class PlatformTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
- if Scope1 != None and Scope1 != 'COMMON':
+ if Scope1 is not None and Scope1 != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
- if Scope2 != None and Scope2 != 'COMMON':
+ if Scope2 is not None and Scope2 != 'COMMON':
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
- if BelongsToItem != None:
+ if BelongsToItem is not None:
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
else:
ConditionString += " AND BelongsToItem<0"
- if FromItem != None:
+ if FromItem is not None:
ConditionString += " AND FromItem=%s" % FromItem
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
diff --git a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
index b93588eea61a..a86f19624c44 100644
--- a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
+++ b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
@@ -30,14 +30,14 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
def CreateXmlElement(Name, String, NodeList, AttributeList):
Doc = xml.dom.minidom.Document()
Element = Doc.createElement(Name)
- if String != '' and String != None:
+ if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if type(Item) == type([]):
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Node = Doc.createElement(Key)
Node.appendChild(Doc.createTextNode(Value))
Element.appendChild(Node)
@@ -46,7 +46,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
for Item in AttributeList:
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
@@ -62,7 +62,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
# @revel Nodes A list of XML nodes matching XPath style Sting.
#
def XmlList(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return []
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
@@ -98,7 +98,7 @@ def XmlList(Dom, String):
# @revel Node A single XML node matching XPath style Sting.
#
def XmlNode(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return ""
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py
index 35b7405e550d..a956294baa0f 100644
--- a/BaseTools/Source/Python/Ecc/c.py
+++ b/BaseTools/Source/Python/Ecc/c.py
@@ -550,7 +550,7 @@ def CollectSourceCodeDataIntoDB(RootDir):
Db.UpdateIdentifierBelongsToFunction()
def GetTableID(FullFileName, ErrorMsgList=None):
- if ErrorMsgList == None:
+ if ErrorMsgList is None:
ErrorMsgList = []
Db = GetDB()
@@ -575,7 +575,7 @@ def GetIncludeFileList(FullFileName):
if os.path.splitext(FullFileName)[1].upper() not in ('.H'):
return []
IFList = IncludeFileListDict.get(FullFileName)
- if IFList != None:
+ if IFList is not None:
return IFList
FileID = GetTableID(FullFileName)
@@ -601,12 +601,12 @@ def GetFullPathOfIncludeFile(Str, IncludePathList):
return None
def GetAllIncludeFiles(FullFileName):
- if AllIncludeFileListDict.get(FullFileName) != None:
+ if AllIncludeFileListDict.get(FullFileName) is not None:
return AllIncludeFileListDict.get(FullFileName)
FileDirName = os.path.dirname(FullFileName)
IncludePathList = IncludePathListDict.get(FileDirName)
- if IncludePathList == None:
+ if IncludePathList is None:
IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())
if FileDirName not in IncludePathList:
IncludePathList.insert(0, FileDirName)
@@ -618,7 +618,7 @@ def GetAllIncludeFiles(FullFileName):
FileName = FileName.strip('\"')
FileName = FileName.lstrip('<').rstrip('>').strip()
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
- if FullPath != None:
+ if FullPath is not None:
IncludeFileQueue.append(FullPath)
i = 0
@@ -629,7 +629,7 @@ def GetAllIncludeFiles(FullFileName):
FileName = FileName.strip('\"')
FileName = FileName.lstrip('<').rstrip('>').strip()
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
- if FullPath != None and FullPath not in IncludeFileQueue:
+ if FullPath is not None and FullPath not in IncludeFileQueue:
IncludeFileQueue.insert(i + 1, FullPath)
i += 1
@@ -853,7 +853,7 @@ def DiffModifier(Str1, Str2):
def GetTypedefDict(FullFileName):
Dict = ComplexTypeDict.get(FullFileName)
- if Dict != None:
+ if Dict is not None:
return Dict
FileID = GetTableID(FullFileName)
@@ -898,7 +898,7 @@ def GetTypedefDict(FullFileName):
def GetSUDict(FullFileName):
Dict = SUDict.get(FullFileName)
- if Dict != None:
+ if Dict is not None:
return Dict
FileID = GetTableID(FullFileName)
@@ -983,9 +983,9 @@ def StripComments(Str):
def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
Value = TypedefDict.get(Type)
- if Value == None:
+ if Value is None:
Value = SUDict.get(Type)
- if Value == None:
+ if Value is None:
return None
LBPos = Value.find('{')
@@ -994,11 +994,11 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
for FT in FTList:
if FT not in ('struct', 'union'):
Value = TypedefDict.get(FT)
- if Value == None:
+ if Value is None:
Value = SUDict.get(FT)
break
- if Value == None:
+ if Value is None:
return None
LBPos = Value.find('{')
@@ -1025,11 +1025,11 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
return None
def GetRealType(Type, TypedefDict, TargetType=None):
- if TargetType != None and Type == TargetType:
+ if TargetType is not None and Type == TargetType:
return Type
while TypedefDict.get(Type):
Type = TypedefDict.get(Type)
- if TargetType != None and Type == TargetType:
+ if TargetType is not None and Type == TargetType:
return Type
return Type
@@ -1043,10 +1043,10 @@ def GetTypeInfo(RefList, Modifier, FullFileName, TargetType=None):
while Index < len(RefList):
FieldName = RefList[Index]
FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)
- if FromType == None:
+ if FromType is None:
return None
# we want to determine the exact type.
- if TargetType != None:
+ if TargetType is not None:
Type = FromType.split()[0]
# we only want to check if it is a pointer
else:
@@ -1151,7 +1151,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
# Type = GetDataTypeFromModifier(Result[0]).split()[-1]
TypeList = GetDataTypeFromModifier(Result[0]).split()
Type = TypeList[-1]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1174,7 +1174,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
Type = TypeList[-1]
if Type == '*' and len(TypeList) >= 2:
Type = TypeList[-2]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1199,7 +1199,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
else:
TypeList = GetDataTypeFromModifier(Result[0]).split()
Type = TypeList[-1]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1230,7 +1230,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
else:
TypeList = GetDataTypeFromModifier(Result[0]).split()
Type = TypeList[-1]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1933,12 +1933,12 @@ def CheckPointerNullComparison(FullFileName):
p = GetFuncDeclPattern()
for Str in PSL:
FuncRecord = GetFuncContainsPE(Str[1], FL)
- if FuncRecord == None:
+ if FuncRecord is None:
continue
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
PredInfo = SplitPredicateStr(Exp)
- if PredInfo[1] == None:
+ if PredInfo[1] is None:
PredVarStr = PredInfo[0][0].strip()
IsFuncCall = False
SearchInCache = False
@@ -1960,7 +1960,7 @@ def CheckPointerNullComparison(FullFileName):
continue
if SearchInCache:
Type = FuncReturnTypeDict.get(PredVarStr)
- if Type != None:
+ if Type is not None:
if Type.find('*') != -1 and Type != 'BOOLEAN*':
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
continue
@@ -1971,7 +1971,7 @@ def CheckPointerNullComparison(FullFileName):
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)
if SearchInCache:
FuncReturnTypeDict[PredVarStr] = Type
- if Type == None:
+ if Type is None:
continue
Type = GetTypeFromArray(Type, PredVarStr)
if Type.find('*') != -1 and Type != 'BOOLEAN*':
@@ -2012,12 +2012,12 @@ def CheckNonBooleanValueComparison(FullFileName):
p = GetFuncDeclPattern()
for Str in PSL:
FuncRecord = GetFuncContainsPE(Str[1], FL)
- if FuncRecord == None:
+ if FuncRecord is None:
continue
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
PredInfo = SplitPredicateStr(Exp)
- if PredInfo[1] == None:
+ if PredInfo[1] is None:
PredVarStr = PredInfo[0][0].strip()
IsFuncCall = False
SearchInCache = False
@@ -2040,7 +2040,7 @@ def CheckNonBooleanValueComparison(FullFileName):
if SearchInCache:
Type = FuncReturnTypeDict.get(PredVarStr)
- if Type != None:
+ if Type is not None:
if Type.find('BOOLEAN') == -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
continue
@@ -2050,7 +2050,7 @@ def CheckNonBooleanValueComparison(FullFileName):
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
if SearchInCache:
FuncReturnTypeDict[PredVarStr] = Type
- if Type == None:
+ if Type is None:
continue
if Type.find('BOOLEAN') == -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
@@ -2091,7 +2091,7 @@ def CheckBooleanValueComparison(FullFileName):
p = GetFuncDeclPattern()
for Str in PSL:
FuncRecord = GetFuncContainsPE(Str[1], FL)
- if FuncRecord == None:
+ if FuncRecord is None:
continue
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
@@ -2119,7 +2119,7 @@ def CheckBooleanValueComparison(FullFileName):
if SearchInCache:
Type = FuncReturnTypeDict.get(PredVarStr)
- if Type != None:
+ if Type is not None:
if Type.find('BOOLEAN') != -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
continue
@@ -2130,7 +2130,7 @@ def CheckBooleanValueComparison(FullFileName):
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
if SearchInCache:
FuncReturnTypeDict[PredVarStr] = Type
- if Type == None:
+ if Type is None:
continue
if Type.find('BOOLEAN') != -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
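
One more standalone sketch, not part of the patch: the cache lookups in this file return early only when the cached value "is not None", which is deliberately stricter than a bare truthiness test. TypedefCache and LoadTypedefs below are made-up names used only for illustration.

    TypedefCache = {}

    def GetTypedefsCached(FileName, LoadTypedefs):
        Cached = TypedefCache.get(FileName)
        if Cached is not None:           # an empty dict is still a valid cache hit
            return Cached
        Cached = LoadTypedefs(FileName)  # may legitimately return {}
        TypedefCache[FileName] = Cached
        return Cached

Written as "if Cached:", a file with no typedefs would be re-parsed on every call; keeping the comparison against None preserves the single-sentinel meaning that dict.get() provides.
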
diff --git a/BaseTools/Source/Python/Eot/CParser.py b/BaseTools/Source/Python/Eot/CParser.py
index 41f2811430a0..94711a9a378a 100644
--- a/BaseTools/Source/Python/Eot/CParser.py
+++ b/BaseTools/Source/Python/Eot/CParser.py
@@ -783,14 +783,14 @@ class CParser(Parser):
if self.backtracking == 0:
- if d != None:
+ if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
else:
self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
self.function_definition_stack[-1].DeclLine = declarator1.start.line
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
- if a != None:
+ if a is not None:
self.function_definition_stack[-1].LBLine = a.start.line
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
else:
@@ -920,7 +920,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if b != None:
+ if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
@@ -957,7 +957,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if t != None:
+ if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
@@ -1401,7 +1401,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if s.stop != None:
+ if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
@@ -1416,7 +1416,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if e.stop != None:
+ if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
diff --git a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
index bb78a0f882d5..b977a9d5322c 100644
--- a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
+++ b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
@@ -291,7 +291,7 @@ class CodeFragmentCollector:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
if self.__CurrentChar() == T_CHAR_LF:
- if HashComment and PPDirectiveObj != None:
+ if HashComment and PPDirectiveObj is not None:
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
PPDirectiveObj.Content += T_CHAR_LF
PPExtend = True
diff --git a/BaseTools/Source/Python/Eot/Eot.py b/BaseTools/Source/Python/Eot/Eot.py
index 5029f7369d4a..c4164199acf3 100644
--- a/BaseTools/Source/Python/Eot/Eot.py
+++ b/BaseTools/Source/Python/Eot/Eot.py
@@ -579,11 +579,11 @@ class Eot(object):
# @param Option: The option list including log level setting
#
def SetLogLevel(self, Option):
- if Option.verbose != None:
+ if Option.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.quiet != None:
+ elif Option.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
diff --git a/BaseTools/Source/Python/Eot/FvImage.py b/BaseTools/Source/Python/Eot/FvImage.py
index 0f742c7d86c2..affca4e71e8a 100644
--- a/BaseTools/Source/Python/Eot/FvImage.py
+++ b/BaseTools/Source/Python/Eot/FvImage.py
@@ -52,7 +52,7 @@ class Image(array):
return array.__new__(cls, 'B')
def __init__(m, ID=None):
- if ID == None:
+ if ID is None:
m._ID_ = str(uuid.uuid1()).upper()
else:
m._ID_ = ID
@@ -208,7 +208,7 @@ class FirmwareVolume(Image):
return (CouldBeLoaded, DepexString, FileDepex)
def Dispatch(self, Db = None):
- if Db == None:
+ if Db is None:
return False
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
# Find PeiCore, DexCore, PeiPriori, DxePriori first
@@ -236,15 +236,15 @@ class FirmwareVolume(Image):
continue
# Parse SEC_CORE first
- if FfsSecCoreGuid != None:
+ if FfsSecCoreGuid is not None:
self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
self.LoadPpi(Db, FfsSecCoreGuid)
# Parse PEI first
- if FfsPeiCoreGuid != None:
+ if FfsPeiCoreGuid is not None:
self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
self.LoadPpi(Db, FfsPeiCoreGuid)
- if FfsPeiPrioriGuid != None:
+ if FfsPeiPrioriGuid is not None:
# Load PEIM described in priori file
FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
if len(FfsPeiPriori.Sections) == 1:
@@ -263,10 +263,10 @@ class FirmwareVolume(Image):
self.DisPatchPei(Db)
# Parse DXE then
- if FfsDxeCoreGuid != None:
+ if FfsDxeCoreGuid is not None:
self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
self.LoadProtocol(Db, FfsDxeCoreGuid)
- if FfsDxePrioriGuid != None:
+ if FfsDxePrioriGuid is not None:
# Load PEIM described in priori file
FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
if len(FfsDxePriori.Sections) == 1:
@@ -383,7 +383,7 @@ class FirmwareVolume(Image):
IsInstalled = True
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
NewFfs.Depex = DepexString
- if FileDepex != None:
+ if FileDepex is not None:
ScheduleList.insert.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
else:
ScheduleList[FfsID] = NewFfs
@@ -471,7 +471,7 @@ class FirmwareVolume(Image):
FfsId = repr(FfsObj)
if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
- if LastFfsObj != None:
+ if LastFfsObj is not None:
LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
else:
if FfsId in self.FfsDict:
@@ -480,7 +480,7 @@ class FirmwareVolume(Image):
% (FfsObj.Guid, FfsObj.Offset,
self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
self.FfsDict[FfsId] = FfsObj
- if LastFfsObj != None:
+ if LastFfsObj is not None:
LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)
FfsStartAddress += len(FfsObj)
@@ -527,11 +527,11 @@ class CompressedImage(Image):
def __init__(m, CompressedData=None, CompressionType=None, UncompressedLength=None):
Image.__init__(m)
- if UncompressedLength != None:
+ if UncompressedLength is not None:
m.UncompressedLength = UncompressedLength
- if CompressionType != None:
+ if CompressionType is not None:
m.CompressionType = CompressionType
- if CompressedData != None:
+ if CompressedData is not None:
m.Data = CompressedData
def __str__(m):
@@ -607,13 +607,13 @@ class GuidDefinedImage(Image):
def __init__(m, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):
Image.__init__(m)
- if SectionDefinitionGuid != None:
+ if SectionDefinitionGuid is not None:
m.SectionDefinitionGuid = SectionDefinitionGuid
- if DataOffset != None:
+ if DataOffset is not None:
m.DataOffset = DataOffset
- if Attributes != None:
+ if Attributes is not None:
m.Attributes = Attributes
- if Data != None:
+ if Data is not None:
m.Data = Data
def __str__(m):
@@ -791,7 +791,7 @@ class Depex(Image):
else:
CurrentData = m._OPCODE_
m._ExprList.append(Token)
- if CurrentData == None:
+ if CurrentData is None:
break
return m._ExprList
@@ -867,9 +867,9 @@ class Section(Image):
def __init__(m, Type=None, Size=None):
Image.__init__(m)
m._Alignment = 1
- if Type != None:
+ if Type is not None:
m.Type = Type
- if Size != None:
+ if Size is not None:
m.Size = Size
def __str__(m):
@@ -1283,7 +1283,7 @@ class LinkMap:
for Line in MapFile:
Line = Line.strip()
if not MappingStart:
- if MappingTitle.match(Line) != None:
+ if MappingTitle.match(Line) is not None:
MappingStart = True
continue
ResultList = MappingFormat.findall(Line)
diff --git a/BaseTools/Source/Python/Eot/InfParserLite.py b/BaseTools/Source/Python/Eot/InfParserLite.py
index 6bb2c5f9f1d6..8867bb5dc23a 100644
--- a/BaseTools/Source/Python/Eot/InfParserLite.py
+++ b/BaseTools/Source/Python/Eot/InfParserLite.py
@@ -52,7 +52,7 @@ class EdkInfParser(object):
self.SourceOverridePath = SourceOverridePath
# Load Inf file if filename is not None
- if Filename != None:
+ if Filename is not None:
self.LoadInfFile(Filename)
if SourceFileList:
diff --git a/BaseTools/Source/Python/Eot/Report.py b/BaseTools/Source/Python/Eot/Report.py
index 386e3eb8ec05..7435b4d7c930 100644
--- a/BaseTools/Source/Python/Eot/Report.py
+++ b/BaseTools/Source/Python/Eot/Report.py
@@ -234,7 +234,7 @@ class Report(object):
#
def GenerateFfs(self, FfsObj):
self.FfsIndex = self.FfsIndex + 1
- if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
+ if FfsObj is not None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
FfsGuid = FfsObj.Guid
FfsOffset = FfsObj._OFF_
FfsName = 'Unknown-Module'
diff --git a/BaseTools/Source/Python/GenFds/AprioriSection.py b/BaseTools/Source/Python/GenFds/AprioriSection.py
index 70e2e5a3baf2..92a74670ed25 100644
--- a/BaseTools/Source/Python/GenFds/AprioriSection.py
+++ b/BaseTools/Source/Python/GenFds/AprioriSection.py
@@ -75,11 +75,11 @@ class AprioriSection (AprioriSectionClassObject):
InfFileName = NormPath(FfsObj.InfFileName)
Arch = FfsObj.GetCurrentArch()
- if Arch != None:
+ if Arch is not None:
Dict['$(ARCH)'] = Arch
InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)
- if Arch != None:
+ if Arch is not None:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
Guid = Inf.Guid
diff --git a/BaseTools/Source/Python/GenFds/Capsule.py b/BaseTools/Source/Python/GenFds/Capsule.py
index e03d78995737..9332f016f7da 100644
--- a/BaseTools/Source/Python/GenFds/Capsule.py
+++ b/BaseTools/Source/Python/GenFds/Capsule.py
@@ -159,7 +159,7 @@ class Capsule (CapsuleClassObject) :
if not os.path.isabs(fmp.ImageFile):
CapInputFile = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, fmp.ImageFile)
CapOutputTmp = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.tmp'
- if ExternalTool == None:
+ if ExternalTool is None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % fmp.Certificate_Guid)
else:
CmdOption += ExternalTool
diff --git a/BaseTools/Source/Python/GenFds/CompressSection.py b/BaseTools/Source/Python/GenFds/CompressSection.py
index 56e71a35453b..08ab48669f45 100644
--- a/BaseTools/Source/Python/GenFds/CompressSection.py
+++ b/BaseTools/Source/Python/GenFds/CompressSection.py
@@ -55,7 +55,7 @@ class CompressSection (CompressSectionClassObject) :
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
- if FfsInf != None:
+ if FfsInf is not None:
self.CompType = FfsInf.__ExtendMacro__(self.CompType)
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
@@ -67,13 +67,13 @@ class CompressSection (CompressSectionClassObject) :
Index = Index + 1
SecIndex = '%s.%d' %(SecNum, Index)
ReturnSectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)
- if AlignValue != None:
- if MaxAlign == None:
+ if AlignValue is not None:
+ if MaxAlign is None:
MaxAlign = AlignValue
if GenFdsGlobalVariable.GetAlignment (AlignValue) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
MaxAlign = AlignValue
if ReturnSectList != []:
- if AlignValue == None:
+ if AlignValue is None:
AlignValue = "1"
for FileData in ReturnSectList:
SectFiles += (FileData,)
diff --git a/BaseTools/Source/Python/GenFds/DataSection.py b/BaseTools/Source/Python/GenFds/DataSection.py
index 2d2975f75c0f..40e345eee77e 100644
--- a/BaseTools/Source/Python/GenFds/DataSection.py
+++ b/BaseTools/Source/Python/GenFds/DataSection.py
@@ -52,7 +52,7 @@ class DataSection (DataSectionClassObject):
#
# Prepare the parameter of GenSection
#
- if FfsFile != None:
+ if FfsFile is not None:
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
else:
@@ -92,7 +92,7 @@ class DataSection (DataSectionClassObject):
NoStrip = True
if self.SecType in ('TE', 'PE32'):
- if self.KeepReloc != None:
+ if self.KeepReloc is not None:
NoStrip = self.KeepReloc
if not NoStrip:
diff --git a/BaseTools/Source/Python/GenFds/DepexSection.py b/BaseTools/Source/Python/GenFds/DepexSection.py
index 1992d2abd807..ef30a2f083c6 100644
--- a/BaseTools/Source/Python/GenFds/DepexSection.py
+++ b/BaseTools/Source/Python/GenFds/DepexSection.py
@@ -86,7 +86,7 @@ class DepexSection (DepexSectionClassObject):
for Exp in ExpList:
if Exp.upper() not in ('AND', 'OR', 'NOT', 'TRUE', 'FALSE', 'SOR', 'BEFORE', 'AFTER', 'END'):
GuidStr = self.__FindGuidValue(Exp)
- if GuidStr == None:
+ if GuidStr is None:
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,
"Depex GUID %s could not be found in build DB! (ModuleName: %s)" % (Exp, ModuleName))
diff --git a/BaseTools/Source/Python/GenFds/EfiSection.py b/BaseTools/Source/Python/GenFds/EfiSection.py
index 5029ec7a1823..7e6c88a0594e 100644
--- a/BaseTools/Source/Python/GenFds/EfiSection.py
+++ b/BaseTools/Source/Python/GenFds/EfiSection.py
@@ -55,10 +55,10 @@ class EfiSection (EfiSectionClassObject):
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False) :
- if self.FileName != None and self.FileName.startswith('PCD('):
+ if self.FileName is not None and self.FileName.startswith('PCD('):
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
"""Prepare the parameter of GenSection"""
- if FfsInf != None :
+ if FfsInf is not None :
InfFileName = FfsInf.InfFileName
SectionType = FfsInf.__ExtendMacro__(self.SectionType)
Filename = FfsInf.__ExtendMacro__(self.FileName)
@@ -66,20 +66,20 @@ class EfiSection (EfiSectionClassObject):
StringData = FfsInf.__ExtendMacro__(self.StringData)
NoStrip = True
if FfsInf.ModuleType in ('SEC', 'PEI_CORE', 'PEIM') and SectionType in ('TE', 'PE32'):
- if FfsInf.KeepReloc != None:
+ if FfsInf.KeepReloc is not None:
NoStrip = FfsInf.KeepReloc
- elif FfsInf.KeepRelocFromRule != None:
+ elif FfsInf.KeepRelocFromRule is not None:
NoStrip = FfsInf.KeepRelocFromRule
- elif self.KeepReloc != None:
+ elif self.KeepReloc is not None:
NoStrip = self.KeepReloc
- elif FfsInf.ShadowFromInfFile != None:
+ elif FfsInf.ShadowFromInfFile is not None:
NoStrip = FfsInf.ShadowFromInfFile
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s apply rule for None!" %ModuleName)
"""If the file name was pointed out, add it in FileList"""
FileList = []
- if Filename != None:
+ if Filename is not None:
Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
# check if the path is absolute or relative
if os.path.isabs(Filename):
@@ -107,14 +107,14 @@ class EfiSection (EfiSectionClassObject):
if SectionType == 'VERSION':
InfOverrideVerString = False
- if FfsInf.Version != None:
+ if FfsInf.Version is not None:
#StringData = FfsInf.Version
BuildNum = FfsInf.Version
InfOverrideVerString = True
if InfOverrideVerString:
#VerTuple = ('-n', '"' + StringData + '"')
- if BuildNum != None and BuildNum != '':
+ if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
else:
BuildNumTuple = tuple()
@@ -136,7 +136,7 @@ class EfiSection (EfiSectionClassObject):
VerString = f.read()
f.close()
BuildNum = VerString
- if BuildNum != None and BuildNum != '':
+ if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
#Ui=VerString,
@@ -146,7 +146,7 @@ class EfiSection (EfiSectionClassObject):
else:
BuildNum = StringData
- if BuildNum != None and BuildNum != '':
+ if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
else:
BuildNumTuple = tuple()
@@ -173,7 +173,7 @@ class EfiSection (EfiSectionClassObject):
elif SectionType == 'UI':
InfOverrideUiString = False
- if FfsInf.Ui != None:
+ if FfsInf.Ui is not None:
StringData = FfsInf.Ui
InfOverrideUiString = True
@@ -196,7 +196,7 @@ class EfiSection (EfiSectionClassObject):
Ui=UiString, IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
else:
- if StringData != None and len(StringData) > 0:
+ if StringData is not None and len(StringData) > 0:
UiTuple = ('-n', '"' + StringData + '"')
else:
UiTuple = tuple()
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index e35057931f03..51cc466ccccd 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -638,7 +638,7 @@ class FdfParser:
if not MacroVal:
if Macro in MacroDict:
MacroVal = MacroDict[Macro]
- if MacroVal != None:
+ if MacroVal is not None:
IncFileName = IncFileName.replace('$(' + Macro + ')', MacroVal, 1)
if MacroVal.find('$(') != -1:
PreIndex = StartPos
@@ -686,7 +686,7 @@ class FdfParser:
# list index of the insertion, note that line number is 'CurrentLine + 1'
InsertAtLine = CurrentLine
ParentProfile = GetParentAtLine (CurrentLine)
- if ParentProfile != None:
+ if ParentProfile is not None:
ParentProfile.IncludeFileList.insert(0, IncFileProfile)
IncFileProfile.Level = ParentProfile.Level + 1
IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
@@ -762,7 +762,7 @@ class FdfParser:
while StartPos != -1 and EndPos != -1 and self.__Token not in ['!ifdef', '!ifndef', '!if', '!elseif']:
MacroName = CurLine[StartPos+2 : EndPos]
MacorValue = self.__GetMacroValue(MacroName)
- if MacorValue != None:
+ if MacorValue is not None:
CurLine = CurLine.replace('$(' + MacroName + ')', MacorValue, 1)
if MacorValue.find('$(') != -1:
PreIndex = StartPos
@@ -1135,7 +1135,7 @@ class FdfParser:
if not self.__GetNextToken():
return False
- if RangeExpression.RegGuidPattern.match(self.__Token) != None:
+ if RangeExpression.RegGuidPattern.match(self.__Token) is not None:
return True
else:
self.__UndoToken()
@@ -1411,7 +1411,7 @@ class FdfParser:
#'\n\tGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \
# At this point, the closest parent would be the included file itself
Profile = GetParentAtLine(X.OriginalLineNumber)
- if Profile != None:
+ if Profile is not None:
X.Message += ' near line %d, column %d: %s' \
% (X.LineNumber, 0, Profile.FileLinesList[X.LineNumber-1])
else:
@@ -1539,7 +1539,7 @@ class FdfParser:
while self.__GetTokenStatements(FdObj):
pass
for Attr in ("BaseAddress", "Size", "ErasePolarity"):
- if getattr(FdObj, Attr) == None:
+ if getattr(FdObj, Attr) is None:
self.__GetNextToken()
raise Warning("Keyword %s missing" % Attr, self.FileName, self.CurrentLineNumber)
@@ -1694,7 +1694,7 @@ class FdfParser:
IsBlock = True
Item = Obj.BlockSizeList[-1]
- if Item[0] == None or Item[1] == None:
+ if Item[0] is None or Item[1] is None:
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
return IsBlock
@@ -1862,7 +1862,7 @@ class FdfParser:
#
def __GetRegionLayout(self, Fd):
Offset = self.__CalcRegionExpr()
- if Offset == None:
+ if Offset is None:
return False
RegionObj = Region.Region()
@@ -1873,7 +1873,7 @@ class FdfParser:
raise Warning("expected '|'", self.FileName, self.CurrentLineNumber)
Size = self.__CalcRegionExpr()
- if Size == None:
+ if Size is None:
raise Warning("expected Region Size", self.FileName, self.CurrentLineNumber)
RegionObj.Size = Size
@@ -2973,7 +2973,7 @@ class FdfParser:
FvImageSectionObj = FvImageSection.FvImageSection()
FvImageSectionObj.Alignment = AlignValue
- if FvObj != None:
+ if FvObj is not None:
FvImageSectionObj.Fv = FvObj
FvImageSectionObj.FvName = None
else:
@@ -3791,7 +3791,7 @@ class FdfParser:
Rule.CheckSum = CheckSum
Rule.Fixed = Fixed
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
while True:
@@ -3847,7 +3847,7 @@ class FdfParser:
Rule.CheckSum = CheckSum
Rule.Fixed = Fixed
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
Rule.FileExtension = Ext
Rule.FileName = self.__Token
@@ -3986,7 +3986,7 @@ class FdfParser:
EfiSectionObj.KeepReloc = False
else:
EfiSectionObj.KeepReloc = True
- if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
raise Warning("Section type %s has reloc strip flag conflict with Rule" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
else:
raise Warning("Section type %s could not have reloc strip flag" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
@@ -4313,7 +4313,7 @@ class FdfParser:
raise Warning("expected Component version", self.FileName, self.CurrentLineNumber)
Pattern = re.compile('-$|[0-9a-fA-F]{1,2}\.[0-9a-fA-F]{1,2}$', re.DOTALL)
- if Pattern.match(self.__Token) == None:
+ if Pattern.match(self.__Token) is None:
raise Warning("Unknown version format '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
CompStatementObj.CompVer = self.__Token
@@ -4577,7 +4577,7 @@ class FdfParser:
for elementRegionData in elementRegion.RegionDataList:
if elementRegionData.endswith(".cap"):
continue
- if elementRegionData != None and elementRegionData.upper() not in CapList:
+ if elementRegionData is not None and elementRegionData.upper() not in CapList:
CapList.append(elementRegionData.upper())
return CapList
@@ -4593,15 +4593,15 @@ class FdfParser:
def __GetReferencedFdCapTuple(self, CapObj, RefFdList = [], RefFvList = []):
for CapsuleDataObj in CapObj.CapsuleDataList :
- if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName != None and CapsuleDataObj.FvName.upper() not in RefFvList:
+ if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:
RefFvList.append (CapsuleDataObj.FvName.upper())
- elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName != None and CapsuleDataObj.FdName.upper() not in RefFdList:
+ elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:
RefFdList.append (CapsuleDataObj.FdName.upper())
- elif CapsuleDataObj.Ffs != None:
+ elif CapsuleDataObj.Ffs is not None:
if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):
- if CapsuleDataObj.Ffs.FvName != None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
+ if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
RefFvList.append(CapsuleDataObj.Ffs.FvName.upper())
- elif CapsuleDataObj.Ffs.FdName != None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
+ elif CapsuleDataObj.Ffs.FdName is not None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
RefFdList.append(CapsuleDataObj.Ffs.FdName.upper())
else:
self.__GetReferencedFdFvTupleFromSection(CapsuleDataObj.Ffs, RefFdList, RefFvList)
@@ -4624,7 +4624,7 @@ class FdfParser:
for elementRegionData in elementRegion.RegionDataList:
if elementRegionData.endswith(".fv"):
continue
- if elementRegionData != None and elementRegionData.upper() not in FvList:
+ if elementRegionData is not None and elementRegionData.upper() not in FvList:
FvList.append(elementRegionData.upper())
return FvList
@@ -4641,9 +4641,9 @@ class FdfParser:
for FfsObj in FvObj.FfsList:
if isinstance(FfsObj, FfsFileStatement.FileStatement):
- if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
RefFvList.append(FfsObj.FvName.upper())
- elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
RefFdList.append(FfsObj.FdName.upper())
else:
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
@@ -4664,9 +4664,9 @@ class FdfParser:
while SectionStack != []:
SectionObj = SectionStack.pop()
if isinstance(SectionObj, FvImageSection.FvImageSection):
- if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
FvList.append(SectionObj.FvName.upper())
- if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
FvList.append(SectionObj.Fv.UiFvName.upper())
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
index 12ec95b56501..3fd5a9c2158a 100644
--- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
@@ -59,7 +59,7 @@ class FileStatement (FileStatementClassObject) :
#
def GenFfs(self, Dict = {}, FvChildAddr=[], FvParentAddr=None, IsMakefile=False, FvName=None):
- if self.NameGuid != None and self.NameGuid.startswith('PCD('):
+ if self.NameGuid is not None and self.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
@@ -81,7 +81,7 @@ class FileStatement (FileStatementClassObject) :
Dict.update(self.DefineVarDict)
SectionAlignments = None
- if self.FvName != None :
+ if self.FvName is not None :
Buffer = StringIO.StringIO('')
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
@@ -89,14 +89,14 @@ class FileStatement (FileStatementClassObject) :
FileName = Fv.AddToBuffer(Buffer)
SectionFiles = [FileName]
- elif self.FdName != None:
+ elif self.FdName is not None:
if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))
Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
FileName = Fd.GenFd()
SectionFiles = [FileName]
- elif self.FileName != None:
+ elif self.FileName is not None:
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
FileContent = ''
@@ -110,7 +110,7 @@ class FileStatement (FileStatementClassObject) :
Content = f.read()
f.close()
AlignValue = 1
- if self.SubAlignment[Index] != None:
+ if self.SubAlignment[Index] is not None:
AlignValue = GenFdsGlobalVariable.GetAlignment(self.SubAlignment[Index])
if AlignValue > MaxAlignValue:
MaxAlignIndex = Index
@@ -151,7 +151,7 @@ class FileStatement (FileStatementClassObject) :
section.FvAddr = FvChildAddr.pop(0)
elif isinstance(section, GuidSection):
section.FvAddr = FvChildAddr
- if FvParentAddr != None and isinstance(section, GuidSection):
+ if FvParentAddr is not None and isinstance(section, GuidSection):
section.FvParentAddr = FvParentAddr
if self.KeepReloc == False:
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
index a34823391171..0dbffffc9a15 100644
--- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -185,7 +185,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
InfLowerPath = str(PathClassObj).lower()
if self.OverrideGuid:
PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
- if self.CurrentArch != None:
+ if self.CurrentArch is not None:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
#
@@ -194,14 +194,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
self.ModuleType = Inf.ModuleType
- if Inf.Specification != None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
+ if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
if Inf.AutoGenVersion < 0x00010005:
self.ModuleType = Inf.ComponentType
self.VersionString = Inf.Version
self.BinFileList = Inf.Binaries
self.SourceFileList = Inf.Sources
- if self.KeepReloc == None and Inf.Shadow:
+ if self.KeepReloc is None and Inf.Shadow:
self.ShadowFromInfFile = Inf.Shadow
else:
@@ -209,7 +209,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
self.ModuleType = Inf.ModuleType
- if Inf.Specification != None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
+ if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
self.VersionString = Inf.Version
self.BinFileList = Inf.Binaries
@@ -231,7 +231,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if self.ModuleType == 'MM_CORE_STANDALONE' and int(self.PiSpecVersion, 16) < 0x00010032:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
- if Inf._Defs != None and len(Inf._Defs) > 0:
+ if Inf._Defs is not None and len(Inf._Defs) > 0:
self.OptRomDefs.update(Inf._Defs)
self.PatchPcds = []
@@ -476,7 +476,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
# Allow binary type module not specify override rule in FDF file.
#
if len(self.BinFileList) > 0:
- if self.Rule == None or self.Rule == "":
+ if self.Rule is None or self.Rule == "":
self.Rule = "BINARY"
if not IsMakefile and GenFdsGlobalVariable.EnableGenfdsMultiThread and self.Rule != 'BINARY':
@@ -545,7 +545,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
def __GetRule__ (self) :
CurrentArchList = []
- if self.CurrentArch == None:
+ if self.CurrentArch is None:
CurrentArchList = ['common']
else:
CurrentArchList.append(self.CurrentArch)
@@ -556,13 +556,13 @@ class FfsInfStatement(FfsInfStatementClassObject):
CurrentArch.upper() + \
'.' + \
self.ModuleType.upper()
- if self.Rule != None:
+ if self.Rule is not None:
RuleName = RuleName + \
'.' + \
self.Rule.upper()
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
- if Rule != None:
+ if Rule is not None:
GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
return Rule
@@ -572,7 +572,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
'.' + \
self.ModuleType.upper()
- if self.Rule != None:
+ if self.Rule is not None:
RuleName = RuleName + \
'.' + \
self.Rule.upper()
@@ -580,11 +580,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
GenFdsGlobalVariable.VerboseLogger ('Trying to apply common rule %s for INF %s' % (RuleName, self.InfFileName))
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
- if Rule != None:
+ if Rule is not None:
GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
return Rule
- if Rule == None :
+ if Rule is None :
EdkLogger.error("GenFds", GENFDS_ERROR, 'Don\'t Find common rule %s for INF %s' \
% (RuleName, self.InfFileName))
@@ -601,7 +601,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
DscArchList = []
for Arch in GenFdsGlobalVariable.ArchList :
PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
- if PlatformDataBase != None:
+ if PlatformDataBase is not None:
if InfFileKey in PlatformDataBase.Modules:
DscArchList.append (Arch)
else:
@@ -648,7 +648,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
ArchList = CurArchList
UseArchList = TargetArchList
- if self.UseArch != None:
+ if self.UseArch is not None:
UseArchList = []
UseArchList.append(self.UseArch)
ArchList = list(set (UseArchList) & set (ArchList))
@@ -689,7 +689,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if self.OverrideGuid:
FileName = self.OverrideGuid
Arch = "NoneArch"
- if self.CurrentArch != None:
+ if self.CurrentArch is not None:
Arch = self.CurrentArch
OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
@@ -723,7 +723,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
FileList = []
OutputFileList = []
GenSecInputFile = None
- if Rule.FileName != None:
+ if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
if os.path.isabs(GenSecInputFile):
GenSecInputFile = os.path.normpath(GenSecInputFile)
@@ -748,11 +748,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
NoStrip = True
if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
- if self.KeepReloc != None:
+ if self.KeepReloc is not None:
NoStrip = self.KeepReloc
- elif Rule.KeepReloc != None:
+ elif Rule.KeepReloc is not None:
NoStrip = Rule.KeepReloc
- elif self.ShadowFromInfFile != None:
+ elif self.ShadowFromInfFile is not None:
NoStrip = self.ShadowFromInfFile
if FileList != [] :
@@ -868,7 +868,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
InputSection.append(InputFile)
SectionAlignments.append(Rule.SectAlignment)
- if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
+ if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
@@ -902,7 +902,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
def __GenComplexFileSection__(self, Rule, FvChildAddr, FvParentAddr, IsMakefile = False):
if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
- if Rule.KeepReloc != None:
+ if Rule.KeepReloc is not None:
self.KeepRelocFromRule = Rule.KeepReloc
SectFiles = []
SectAlignments = []
@@ -957,7 +957,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
Sect.FvAddr = FvChildAddr.pop(0)
elif isinstance(Sect, GuidSection):
Sect.FvAddr = FvChildAddr
- if FvParentAddr != None and isinstance(Sect, GuidSection):
+ if FvParentAddr is not None and isinstance(Sect, GuidSection):
Sect.FvParentAddr = FvParentAddr
if Rule.KeyStringList != []:
@@ -1040,7 +1040,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
def __GenComplexFileFfs__(self, Rule, InputFile, Alignments, MakefilePath = None):
- if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
+ if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
@@ -1079,7 +1079,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if Rule.CheckSum != False:
result += ('-s',)
- if Rule.Alignment != None and Rule.Alignment != '':
+ if Rule.Alignment is not None and Rule.Alignment != '':
result += ('-a', Rule.Alignment)
return result
diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py
index c0b869d250f1..14e36b885966 100644
--- a/BaseTools/Source/Python/GenFds/Fv.py
+++ b/BaseTools/Source/Python/GenFds/Fv.py
@@ -70,14 +70,14 @@ class FV (FvClassObject):
#
def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', VtfDict=None, MacroDict = {}, Flag=False) :
- if BaseAddress == None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
+ if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']
#
# Check whether FV in Capsule is in FD flash region.
# If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
#
- if self.CapsuleName != None:
+ if self.CapsuleName is not None:
for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
for RegionObj in FdObj.RegionList:
@@ -94,7 +94,7 @@ class FV (FvClassObject):
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
FFSGuid = None
- if self.FvBaseAddress != None:
+ if self.FvBaseAddress is not None:
BaseAddress = self.FvBaseAddress
if not Flag:
self.__InitializeInf__(BaseAddress, BlockSize, BlockNum, ErasePloarity, VtfDict)
@@ -136,7 +136,7 @@ class FV (FvClassObject):
FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
FvOutputFile = FvOutputFile + '.Fv'
# BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
- if self.CreateFileName != None:
+ if self.CreateFileName is not None:
FvOutputFile = self.CreateFileName
if Flag:
@@ -163,7 +163,7 @@ class FV (FvClassObject):
NewFvInfo = None
if os.path.exists (FvInfoFileName):
NewFvInfo = open(FvInfoFileName, 'r').read()
- if NewFvInfo != None and NewFvInfo != OrigFvInfo:
+ if NewFvInfo is not None and NewFvInfo != OrigFvInfo:
FvChildAddr = []
AddFileObj = open(FvInfoFileName, 'r')
AddrStrings = AddFileObj.readlines()
@@ -273,16 +273,16 @@ class FV (FvClassObject):
# Add [Options]
#
self.FvInfFile.writelines("[options]" + T_CHAR_LF)
- if BaseAddress != None :
+ if BaseAddress is not None :
self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \
BaseAddress + \
T_CHAR_LF)
- if BlockSize != None:
+ if BlockSize is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize + \
T_CHAR_LF)
- if BlockNum != None:
+ if BlockNum is not None:
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockNum + \
T_CHAR_LF)
@@ -293,20 +293,20 @@ class FV (FvClassObject):
self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + T_CHAR_LF)
for BlockSize in self.BlockSizeList :
- if BlockSize[0] != None:
+ if BlockSize[0] is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize[0] + \
T_CHAR_LF)
- if BlockSize[1] != None:
+ if BlockSize[1] is not None:
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockSize[1] + \
T_CHAR_LF)
- if self.BsBaseAddress != None:
+ if self.BsBaseAddress is not None:
self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.BsBaseAddress)
- if self.RtBaseAddress != None:
+ if self.RtBaseAddress is not None:
self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.RtBaseAddress)
#
@@ -317,7 +317,7 @@ class FV (FvClassObject):
self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \
' %s' %ErasePloarity + \
T_CHAR_LF)
- if not (self.FvAttributeDict == None):
+ if not (self.FvAttributeDict is None):
for FvAttribute in self.FvAttributeDict.keys() :
if FvAttribute == "FvUsedSizeEnable":
if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1') :
@@ -328,7 +328,7 @@ class FV (FvClassObject):
' = ' + \
self.FvAttributeDict[FvAttribute] + \
T_CHAR_LF )
- if self.FvAlignment != None:
+ if self.FvAlignment is not None:
self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \
self.FvAlignment.strip() + \
" = TRUE" + \
@@ -337,7 +337,7 @@ class FV (FvClassObject):
#
# Generate FV extension header file
#
- if self.FvNameGuid == None or self.FvNameGuid == '':
+ if self.FvNameGuid is None or self.FvNameGuid == '':
if len(self.FvExtEntryType) > 0 or self.UsedSizeEnable:
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
@@ -442,7 +442,7 @@ class FV (FvClassObject):
# Add [Files]
#
self.FvInfFile.writelines("[files]" + T_CHAR_LF)
- if VtfDict != None and self.UiFvName in VtfDict.keys():
+ if VtfDict is not None and self.UiFvName in VtfDict.keys():
self.FvInfFile.writelines("EFI_FILE_NAME = " + \
VtfDict.get(self.UiFvName) + \
T_CHAR_LF)
diff --git a/BaseTools/Source/Python/GenFds/FvImageSection.py b/BaseTools/Source/Python/GenFds/FvImageSection.py
index 916ff919176c..5026a3ffca2f 100644
--- a/BaseTools/Source/Python/GenFds/FvImageSection.py
+++ b/BaseTools/Source/Python/GenFds/FvImageSection.py
@@ -53,7 +53,7 @@ class FvImageSection(FvImageSectionClassObject):
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
OutputFileList = []
- if self.FvFileType != None:
+ if self.FvFileType is not None:
FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FvFileType, self.FvFileExtension)
if IsSect :
return FileList, self.Alignment
@@ -96,20 +96,20 @@ class FvImageSection(FvImageSectionClassObject):
#
# Generate Fv
#
- if self.FvName != None:
+ if self.FvName is not None:
Buffer = StringIO.StringIO('')
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
- if Fv != None:
+ if Fv is not None:
self.Fv = Fv
FvFileName = Fv.AddToBuffer(Buffer, self.FvAddr, MacroDict = Dict, Flag=IsMakefile)
- if Fv.FvAlignment != None:
- if self.Alignment == None:
+ if Fv.FvAlignment is not None:
+ if self.Alignment is None:
self.Alignment = Fv.FvAlignment
else:
if GenFdsGlobalVariable.GetAlignment (Fv.FvAlignment) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
self.Alignment = Fv.FvAlignment
else:
- if self.FvFileName != None:
+ if self.FvFileName is not None:
FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
if os.path.isfile(FvFileName):
FvFileObj = open (FvFileName,'rb')
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py
index 03126e35f47a..515cfd06ccb0 100644
--- a/BaseTools/Source/Python/GenFds/GenFds.py
+++ b/BaseTools/Source/Python/GenFds/GenFds.py
@@ -69,22 +69,22 @@ def main():
EdkLogger.Initialize()
try:
- if Options.verbose != None:
+ if Options.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
GenFdsGlobalVariable.VerboseMode = True
- if Options.FixedAddress != None:
+ if Options.FixedAddress is not None:
GenFdsGlobalVariable.FixedLoadAddress = True
- if Options.quiet != None:
+ if Options.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- if Options.debug != None:
+ if Options.debug is not None:
EdkLogger.SetLevel(Options.debug + 1)
GenFdsGlobalVariable.DebugLevel = Options.debug
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if (Options.Workspace == None):
+ if (Options.Workspace is None):
EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined",
ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
elif not os.path.exists(Options.Workspace):
@@ -179,7 +179,7 @@ def main():
# if no tool chain given in command line, get it from target.txt
if not GenFdsGlobalVariable.ToolChainTag:
ToolChainList = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
- if ToolChainList == None or len(ToolChainList) == 0:
+ if ToolChainList is None or len(ToolChainList) == 0:
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.")
if len(ToolChainList) != 1:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for ToolChain.")
@@ -300,7 +300,7 @@ def main():
"No such a Capsule in FDF file: %s" % Options.uiCapName)
GenFdsGlobalVariable.WorkSpace = BuildWorkSpace
- if ArchList != None:
+ if ArchList is not None:
GenFdsGlobalVariable.ArchList = ArchList
# Dsc Build Data will handle Pcd Settings from CommandLine.
@@ -340,7 +340,7 @@ def main():
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID
except FatalError, X:
- if Options.debug != None:
+ if Options.debug is not None:
import traceback
EdkLogger.quiet(traceback.format_exc())
ReturnCode = X.args[0]
@@ -378,7 +378,7 @@ def SingleCheckCallback(option, opt_str, value, parser):
def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase
# if user not specify filter, try to deduce it from global data.
- if KeyStringList == None or KeyStringList == []:
+ if KeyStringList is None or KeyStringList == []:
Target = GenFdsGlobalVariable.TargetName
ToolChain = GenFdsGlobalVariable.ToolChainTag
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
@@ -411,7 +411,7 @@ def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
ToolOptionKey = Key + '_' + KeyList[3] + '_FLAGS'
ToolPath = ToolDefinition.get(ToolPathKey)
ToolOption = ToolDefinition.get(ToolOptionKey)
- if ToolPathTmp == None:
+ if ToolPathTmp is None:
ToolPathTmp = ToolPath
else:
if ToolPathTmp != ToolPath:
@@ -523,38 +523,38 @@ class GenFds :
GenFdsGlobalVariable.SetDir ('', FdfParser, WorkSpace, ArchList)
GenFdsGlobalVariable.VerboseLogger(" Generate all Fd images and their required FV and Capsule images!")
- if GenFds.OnlyGenerateThisCap != None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
+ if GenFds.OnlyGenerateThisCap is not None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.get(GenFds.OnlyGenerateThisCap.upper())
- if CapsuleObj != None:
+ if CapsuleObj is not None:
CapsuleObj.GenCapsule()
return
- if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
+ if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(GenFds.OnlyGenerateThisFd.upper())
- if FdObj != None:
+ if FdObj is not None:
FdObj.GenFd()
return
- elif GenFds.OnlyGenerateThisFd == None and GenFds.OnlyGenerateThisFv == None:
+ elif GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisFv is None:
for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
FdObj.GenFd()
GenFdsGlobalVariable.VerboseLogger("\n Generate other FV images! ")
- if GenFds.OnlyGenerateThisFv != None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
+ if GenFds.OnlyGenerateThisFv is not None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(GenFds.OnlyGenerateThisFv.upper())
- if FvObj != None:
+ if FvObj is not None:
Buffer = StringIO.StringIO()
FvObj.AddToBuffer(Buffer)
Buffer.close()
return
- elif GenFds.OnlyGenerateThisFv == None:
+ elif GenFds.OnlyGenerateThisFv is None:
for FvName in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
Buffer = StringIO.StringIO('')
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[FvName]
FvObj.AddToBuffer(Buffer)
Buffer.close()
- if GenFds.OnlyGenerateThisFv == None and GenFds.OnlyGenerateThisFd == None and GenFds.OnlyGenerateThisCap == None:
+ if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
for CapsuleName in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
@@ -592,14 +592,14 @@ class GenFds :
def GetFvBlockSize(FvObj):
DefaultBlockSize = 0x1
FdObj = None
- if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
+ if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
- if FdObj == None:
+ if FdObj is None:
for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
for ElementRegion in ElementFd.RegionList:
if ElementRegion.RegionType == 'FV':
for ElementRegionData in ElementRegion.RegionDataList:
- if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName:
+ if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
else:
@@ -611,7 +611,7 @@ class GenFds :
for ElementRegion in FdObj.RegionList:
if ElementRegion.RegionType == 'FV':
for ElementRegionData in ElementRegion.RegionDataList:
- if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName:
+ if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
else:
diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
index 97e20753ae9b..fcb191981c95 100644
--- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
+++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
@@ -229,7 +229,7 @@ class GenFdsGlobalVariable:
Source = SourceList[Index]
Index = Index + 1
- if File.IsBinary and File == Source and Inf.Binaries != None and File in Inf.Binaries:
+ if File.IsBinary and File == Source and Inf.Binaries is not None and File in Inf.Binaries:
# Skip all files that are not binary libraries
if not Inf.LibraryClass:
continue
@@ -420,7 +420,7 @@ class GenFdsGlobalVariable:
if not os.path.exists(Output):
return True
# always update "Output" if no "Input" given
- if Input == None or len(Input) == 0:
+ if Input is None or len(Input) == 0:
return True
# if fdf file is changed after the 'Output" is generated, update the 'Output'
@@ -445,9 +445,9 @@ class GenFdsGlobalVariable:
Cmd += ["-s", Type]
if CompressionType not in [None, '']:
Cmd += ["-c", CompressionType]
- if Guid != None:
+ if Guid is not None:
Cmd += ["-g", Guid]
- if DummyFile != None:
+ if DummyFile is not None:
Cmd += ["--dummy", DummyFile]
if GuidHdrLen not in [None, '']:
Cmd += ["-l", GuidHdrLen]
@@ -455,7 +455,7 @@ class GenFdsGlobalVariable:
#Add each guided attribute
for Attr in GuidAttr:
Cmd += ["-r", Attr]
- if InputAlign != None:
+ if InputAlign is not None:
#Section Align is only for dummy section without section type
for SecAlign in InputAlign:
Cmd += ["--sectionalign", SecAlign]
@@ -509,7 +509,7 @@ class GenFdsGlobalVariable:
@staticmethod
def GetAlignment (AlignString):
- if AlignString == None:
+ if AlignString is None:
return 0
if AlignString in ("1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K"):
return int (AlignString.rstrip('K')) * 1024
@@ -669,13 +669,13 @@ class GenFdsGlobalVariable:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
- if ClassCode != None:
+ if ClassCode is not None:
Cmd += ["-l", ClassCode]
- if Revision != None:
+ if Revision is not None:
Cmd += ["-r", Revision]
- if DeviceId != None:
+ if DeviceId is not None:
Cmd += ["-i", DeviceId]
- if VendorId != None:
+ if VendorId is not None:
Cmd += ["-f", VendorId]
Cmd += ["-o", Output]
@@ -726,7 +726,7 @@ class GenFdsGlobalVariable:
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
(out, error) = PopenObject.communicate()
- while PopenObject.returncode == None :
+ while PopenObject.returncode is None :
PopenObject.wait()
if returnValue != [] and returnValue[0] != 0:
#get command return value
@@ -758,7 +758,7 @@ class GenFdsGlobalVariable:
# @param MacroDict Dictionary that contains macro value pair
#
def MacroExtend (Str, MacroDict={}, Arch='COMMON'):
- if Str == None :
+ if Str is None :
return None
Dict = {'$(WORKSPACE)' : GenFdsGlobalVariable.WorkSpaceDir,
@@ -774,7 +774,7 @@ class GenFdsGlobalVariable:
Dict['$(OUTPUT_DIRECTORY)'] = OutputDir
- if MacroDict != None and len (MacroDict) != 0:
+ if MacroDict is not None and len (MacroDict) != 0:
Dict.update(MacroDict)
for key in Dict.keys():
@@ -794,7 +794,7 @@ class GenFdsGlobalVariable:
# @param PcdPattern pattern that labels a PCD.
#
def GetPcdValue (PcdPattern):
- if PcdPattern == None :
+ if PcdPattern is None :
return None
PcdPair = PcdPattern.lstrip('PCD(').rstrip(')').strip().split('.')
TokenSpace = PcdPair[0]
diff --git a/BaseTools/Source/Python/GenFds/GuidSection.py b/BaseTools/Source/Python/GenFds/GuidSection.py
index ea737bb9a7ea..8362073f97a3 100644
--- a/BaseTools/Source/Python/GenFds/GuidSection.py
+++ b/BaseTools/Source/Python/GenFds/GuidSection.py
@@ -60,7 +60,7 @@ class GuidSection(GuidSectionClassObject) :
#
self.KeyStringList = KeyStringList
self.CurrentArchList = GenFdsGlobalVariable.ArchList
- if FfsInf != None:
+ if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
@@ -79,7 +79,7 @@ class GuidSection(GuidSectionClassObject) :
if self.FvAddr != []:
#no use FvAddr when the image is processed.
self.FvAddr = []
- if self.FvParentAddr != None:
+ if self.FvParentAddr is not None:
#no use Parent Addr when the image is processed.
self.FvParentAddr = None
@@ -99,20 +99,20 @@ class GuidSection(GuidSectionClassObject) :
if Sect.IncludeFvSection:
self.IncludeFvSection = Sect.IncludeFvSection
- if align != None:
- if MaxAlign == None:
+ if align is not None:
+ if MaxAlign is None:
MaxAlign = align
if GenFdsGlobalVariable.GetAlignment (align) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
MaxAlign = align
if ReturnSectList != []:
- if align == None:
+ if align is None:
align = "1"
for file in ReturnSectList:
SectFile += (file,)
SectAlign.append(align)
- if MaxAlign != None:
- if self.Alignment == None:
+ if MaxAlign is not None:
+ if self.Alignment is None:
self.Alignment = MaxAlign
else:
if GenFdsGlobalVariable.GetAlignment (MaxAlign) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
@@ -128,21 +128,21 @@ class GuidSection(GuidSectionClassObject) :
ExternalTool = None
ExternalOption = None
- if self.NameGuid != None:
+ if self.NameGuid is not None:
ExternalTool, ExternalOption = FindExtendTool(self.KeyStringList, self.CurrentArchList, self.NameGuid)
#
# If not have GUID , call default
# GENCRC32 section
#
- if self.NameGuid == None :
+ if self.NameGuid is None :
GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign, IsMakefile=IsMakefile)
OutputFileList = []
OutputFileList.append(OutputFile)
return OutputFileList, self.Alignment
#or GUID not in External Tool List
- elif ExternalTool == None:
+ elif ExternalTool is None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
else:
DummyFile = OutputFile + ".dummy"
@@ -170,10 +170,10 @@ class GuidSection(GuidSectionClassObject) :
FirstCall = False
CmdOption = '-e'
- if ExternalOption != None:
+ if ExternalOption is not None:
CmdOption = CmdOption + ' ' + ExternalOption
if not GenFdsGlobalVariable.EnableGenfdsMultiThread:
- if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr != None:
+ if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr is not None:
#FirstCall is only set for the encapsulated flash FV image without process required attribute.
FirstCall = True
#
@@ -213,7 +213,7 @@ class GuidSection(GuidSectionClassObject) :
if self.ExtraHeaderSize != -1:
HeaderLength = str(self.ExtraHeaderSize)
- if self.ProcessRequired == "NONE" and HeaderLength == None:
+ if self.ProcessRequired == "NONE" and HeaderLength is None:
if TempFileSize > InputFileSize:
FileHandleIn.seek(0)
BufferIn = FileHandleIn.read()
@@ -222,7 +222,7 @@ class GuidSection(GuidSectionClassObject) :
if BufferIn == BufferOut[TempFileSize - InputFileSize:]:
HeaderLength = str(TempFileSize - InputFileSize)
#auto sec guided attribute with process required
- if HeaderLength == None:
+ if HeaderLength is None:
Attribute.append('PROCESSING_REQUIRED')
FileHandleIn.close()
@@ -253,7 +253,7 @@ class GuidSection(GuidSectionClassObject) :
HeaderLength = str(self.ExtraHeaderSize)
if self.AuthStatusValid in ("TRUE", "1"):
Attribute.append('AUTH_STATUS_VALID')
- if self.ProcessRequired == "NONE" and HeaderLength == None:
+ if self.ProcessRequired == "NONE" and HeaderLength is None:
GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
Guid=self.NameGuid, GuidAttr=Attribute,
GuidHdrLen=HeaderLength, DummyFile=DummyFile, IsMakefile=IsMakefile)
diff --git a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
index ab4fae611e33..4ef9b4d0e9a8 100644
--- a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
@@ -41,7 +41,7 @@ class OptRomFileStatement:
#
def GenFfs(self, Dict = {}, IsMakefile=False):
- if self.FileName != None:
+ if self.FileName is not None:
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
return self.FileName
diff --git a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
index 80c4bbab6eff..62d731fb9cca 100644
--- a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
@@ -46,10 +46,10 @@ class OptRomInfStatement (FfsInfStatement):
#
def __GetOptRomParams(self):
- if self.OverrideAttribs == None:
+ if self.OverrideAttribs is None:
self.OverrideAttribs = OptionRom.OverrideAttribs()
- if self.OverrideAttribs.NeedCompress == None:
+ if self.OverrideAttribs.NeedCompress is None:
self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
if self.OverrideAttribs.NeedCompress is not None:
if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
@@ -57,16 +57,16 @@ class OptRomInfStatement (FfsInfStatement):
self.OverrideAttribs.NeedCompress = \
self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
- if self.OverrideAttribs.PciVendorId == None:
+ if self.OverrideAttribs.PciVendorId is None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
- if self.OverrideAttribs.PciClassCode == None:
+ if self.OverrideAttribs.PciClassCode is None:
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
- if self.OverrideAttribs.PciDeviceId == None:
+ if self.OverrideAttribs.PciDeviceId is None:
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
- if self.OverrideAttribs.PciRevision == None:
+ if self.OverrideAttribs.PciRevision is None:
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
@@ -121,7 +121,7 @@ class OptRomInfStatement (FfsInfStatement):
#
OutputFileList = []
- if Rule.FileName != None:
+ if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
OutputFileList.append(GenSecInputFile)
else:
@@ -143,7 +143,7 @@ class OptRomInfStatement (FfsInfStatement):
OutputFileList = []
for Sect in Rule.SectionList:
if Sect.SectionType == 'PE32':
- if Sect.FileName != None:
+ if Sect.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
OutputFileList.append(GenSecInputFile)
else:
diff --git a/BaseTools/Source/Python/GenFds/OptionRom.py b/BaseTools/Source/Python/GenFds/OptionRom.py
index 2e61a38c1d33..b05841529940 100644
--- a/BaseTools/Source/Python/GenFds/OptionRom.py
+++ b/BaseTools/Source/Python/GenFds/OptionRom.py
@@ -63,7 +63,7 @@ class OPTIONROM (OptionRomClassObject):
FilePathNameList = FfsFile.GenFfs(IsMakefile=Flag)
if len(FilePathNameList) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s not produce .efi files, so NO file could be put into option ROM." % (FfsFile.InfFileName))
- if FfsFile.OverrideAttribs == None:
+ if FfsFile.OverrideAttribs is None:
EfiFileList.extend(FilePathNameList)
else:
FileName = os.path.basename(FilePathNameList[0])
@@ -84,7 +84,7 @@ class OPTIONROM (OptionRomClassObject):
BinFileList.append(TmpOutputFile)
else:
FilePathName = FfsFile.GenFfs(IsMakefile=Flag)
- if FfsFile.OverrideAttribs != None:
+ if FfsFile.OverrideAttribs is not None:
FileName = os.path.basename(FilePathName)
TmpOutputDir = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName, FfsFile.CurrentArch)
if not os.path.exists(TmpOutputDir) :
diff --git a/BaseTools/Source/Python/GenFds/Region.py b/BaseTools/Source/Python/GenFds/Region.py
index c946758cf549..e639739b7e03 100644
--- a/BaseTools/Source/Python/GenFds/Region.py
+++ b/BaseTools/Source/Python/GenFds/Region.py
@@ -114,7 +114,7 @@ class Region(RegionClassObject):
if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
- if FvObj != None :
+ if FvObj is not None :
if not Flag:
GenFdsGlobalVariable.InfLogger(' Region Name = FV')
#
@@ -152,7 +152,7 @@ class Region(RegionClassObject):
# Add the exist Fv image into FD buffer
#
if not Flag:
- if FileName != None:
+ if FileName is not None:
FileLength = os.stat(FileName)[ST_SIZE]
if FileLength > Size:
EdkLogger.error("GenFds", GENFDS_ERROR,
@@ -193,7 +193,7 @@ class Region(RegionClassObject):
if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
- if CapsuleObj != None :
+ if CapsuleObj is not None :
CapsuleObj.CapsuleName = RegionData.upper()
GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
#
@@ -270,7 +270,7 @@ class Region(RegionClassObject):
#
self.PadBuffer(Buffer, ErasePolarity, Size)
- if self.RegionType == None:
+ if self.RegionType is None:
GenFdsGlobalVariable.InfLogger(' Region Name = None')
self.PadBuffer(Buffer, ErasePolarity, Size)
@@ -333,7 +333,7 @@ class Region(RegionClassObject):
# first check whether FvObj.BlockSizeList items have only "BlockSize" or "NumBlocks",
# if so, use ExpectedList
for Item in FvObj.BlockSizeList:
- if Item[0] == None or Item[1] == None:
+ if Item[0] is None or Item[1] is None:
FvObj.BlockSizeList = ExpectedList
break
# make sure region size is no smaller than the summed block size in FV
diff --git a/BaseTools/Source/Python/GenFds/Section.py b/BaseTools/Source/Python/GenFds/Section.py
index 463faa378165..5e0b4bee7d1c 100644
--- a/BaseTools/Source/Python/GenFds/Section.py
+++ b/BaseTools/Source/Python/GenFds/Section.py
@@ -116,17 +116,17 @@ class Section (SectionClassObject):
else :
IsSect = False
- if FileExtension != None:
+ if FileExtension is not None:
Suffix = FileExtension
elif IsSect :
Suffix = Section.SectionType.get(FileType)
else:
Suffix = Section.BinFileType.get(FileType)
- if FfsInf == None:
+ if FfsInf is None:
EdkLogger.error("GenFds", GENFDS_ERROR, 'Inf File does not exist!')
FileList = []
- if FileType != None:
+ if FileType is not None:
for File in FfsInf.BinFileList:
if File.Arch == "COMMON" or FfsInf.CurrentArch == File.Arch:
if File.Type == FileType or (int(FfsInf.PiSpecVersion, 16) >= 0x0001000A \
@@ -141,7 +141,7 @@ class Section (SectionClassObject):
else:
GenFdsGlobalVariable.InfLogger ("\nCurrent ARCH \'%s\' of File %s is not in the Support Arch Scope of %s specified by INF %s in FDF" %(FfsInf.CurrentArch, File.File, File.Arch, FfsInf.InfFileName))
- if (not IsMakefile and Suffix != None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix != None):
+ if (not IsMakefile and Suffix is not None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix is not None):
#
# Get Makefile path and time stamp
#
diff --git a/BaseTools/Source/Python/GenFds/UiSection.py b/BaseTools/Source/Python/GenFds/UiSection.py
index 4f6926f7cae4..6340520602ee 100644
--- a/BaseTools/Source/Python/GenFds/UiSection.py
+++ b/BaseTools/Source/Python/GenFds/UiSection.py
@@ -52,16 +52,16 @@ class UiSection (UiSectionClassObject):
#
# Prepare the parameter of GenSection
#
- if FfsInf != None:
+ if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
self.FileName = FfsInf.__ExtendMacro__(self.FileName)
OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get('UI'))
- if self.StringData != None :
+ if self.StringData is not None :
NameString = self.StringData
- elif self.FileName != None:
+ elif self.FileName is not None:
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
FileObj = open(FileNameStr, 'r')
diff --git a/BaseTools/Source/Python/GenFds/VerSection.py b/BaseTools/Source/Python/GenFds/VerSection.py
index e29029980fad..11e974b9936e 100644
--- a/BaseTools/Source/Python/GenFds/VerSection.py
+++ b/BaseTools/Source/Python/GenFds/VerSection.py
@@ -52,7 +52,7 @@ class VerSection (VerSectionClassObject):
#
# Prepare the parameter of GenSection
#
- if FfsInf != None:
+ if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
@@ -64,9 +64,9 @@ class VerSection (VerSectionClassObject):
# Get String Data
StringData = ''
- if self.StringData != None:
+ if self.StringData is not None:
StringData = self.StringData
- elif self.FileName != None:
+ elif self.FileName is not None:
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
FileObj = open(FileNameStr, 'r')
diff --git a/BaseTools/Source/Python/GenFds/Vtf.py b/BaseTools/Source/Python/GenFds/Vtf.py
index 06e3d275c381..18ea37b9afdd 100644
--- a/BaseTools/Source/Python/GenFds/Vtf.py
+++ b/BaseTools/Source/Python/GenFds/Vtf.py
@@ -68,7 +68,7 @@ class Vtf (VtfClassObject):
FvList = self.GetFvList()
self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')
BsfInf = open(self.BsfInfName, 'w+')
- if self.ResetBin != None:
+ if self.ResetBin is not None:
BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF)
BsfInf.writelines ("IA32_RST_BIN" + \
" = " + \
@@ -89,7 +89,7 @@ class Vtf (VtfClassObject):
'N' + \
T_CHAR_LF)
- elif ComponentObj.FilePos != None:
+ elif ComponentObj.FilePos is not None:
BsfInf.writelines ("COMP_LOC" + \
" = " + \
ComponentObj.FilePos + \
diff --git a/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py b/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
index fdad5a44dc3d..71895d4acddd 100644
--- a/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
+++ b/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
@@ -73,7 +73,7 @@ def _parseForXcode(lines, efifilepath):
if status == 1 and len(line) != 0:
if '_gPcd_BinaryPatch_' in line:
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*_gPcd_BinaryPatch_([\w]+))', line)
- if m != None:
+ if m is not None:
pcds.append((m.groups(0)[3], int(m.groups(0)[0], 16)))
return pcds
@@ -99,20 +99,20 @@ def _parseForGCC(lines, efifilepath):
# status handler
if status == 3:
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)
- if m != None:
+ if m is not None:
sections.append(m.groups(0))
if status == 3:
m = re.match('^.data._gPcd_BinaryPatch_([\w_\d]+)$', line)
- if m != None:
+ if m is not None:
if lines[index + 1]:
PcdName = m.groups(0)[0]
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', lines[index + 1].strip())
- if m != None:
+ if m is not None:
bpcds.append((PcdName, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return None
#redirection
redirection = 0
@@ -152,18 +152,18 @@ def _parseGeneral(lines, efifilepath):
continue
if status == 1 and len(line) != 0:
m = secRe.match(line)
- assert m != None, "Fail to parse the section in map file , line is %s" % line
+ assert m is not None, "Fail to parse the section in map file , line is %s" % line
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
if status == 2 and len(line) != 0:
m = symRe.match(line)
- assert m != None, "Fail to parse the symbol in map file, line is %s" % line
+ assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
sec_no = int(sec_no, 16)
sym_offset = int(sym_offset, 16)
vir_addr = int(vir_addr, 16)
m2 = re.match('^[_]+gPcd_BinaryPatch_([\w]+)', sym_name)
- if m2 != None:
+ if m2 is not None:
# fond a binary pcd entry in map file
for sec in secs:
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
@@ -173,7 +173,7 @@ def _parseGeneral(lines, efifilepath):
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return None
pcds = []
@@ -214,12 +214,12 @@ if __name__ == '__main__':
(options, args) = parser.parse_args()
- if options.mapfile == None or options.efifile == None:
+ if options.mapfile is None or options.efifile is None:
print parser.get_usage()
elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):
list = parsePcdInfoFromMapFile(options.mapfile, options.efifile)
- if list != None:
- if options.outfile != None:
+ if list is not None:
+ if options.outfile is not None:
generatePcdTable(list, options.outfile)
else:
generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))
diff --git a/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py b/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
index 942ba88d200f..0c8009cb0b44 100644
--- a/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
+++ b/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
@@ -267,13 +267,13 @@ def Main():
if not os.path.exists (InputFile):
EdkLogger.error("PatchPcdValue", FILE_NOT_FOUND, ExtraData=InputFile)
return 1
- if CommandOptions.PcdOffset == None or CommandOptions.PcdValue == None or CommandOptions.PcdTypeName == None:
+ if CommandOptions.PcdOffset is None or CommandOptions.PcdValue is None or CommandOptions.PcdTypeName is None:
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
return 1
if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:
EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
return 1
- if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:
+ if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize is None:
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")
return 1
#
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py
index bfdf763a7abc..ede9713c9b8b 100644
--- a/BaseTools/Source/Python/TargetTool/TargetTool.py
+++ b/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -85,7 +85,7 @@ class TargetTool():
for Key in KeyList:
if type(self.TargetTxtDictionary[Key]) == type([]):
print "%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key]))
- elif self.TargetTxtDictionary[Key] == None:
+ elif self.TargetTxtDictionary[Key] is None:
errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
else:
print "%-30s = %s" % (Key, self.TargetTxtDictionary[Key])
@@ -116,14 +116,14 @@ class TargetTool():
Line = "%-30s = \n" % Key
else:
ret = GetConfigureKeyValue(self, Key)
- if ret != None:
+ if ret is not None:
Line = ret
fw.write(Line)
for key in self.TargetTxtDictionary.keys():
if key not in existKeys:
print "Warning: %s does not exist in original configuration file" % key
Line = GetConfigureKeyValue(self, key)
- if Line == None:
+ if Line is None:
Line = "%-30s = " % key
fw.write(Line)
@@ -138,14 +138,14 @@ class TargetTool():
def GetConfigureKeyValue(self, Key):
Line = None
- if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE != None:
+ if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE is not None:
dscFullPath = os.path.join(self.WorkSpace, self.Opt.DSCFILE)
if os.path.exists(dscFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
else:
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
- elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE != None:
+ elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
if os.path.exists(tooldefFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
@@ -157,15 +157,15 @@ def GetConfigureKeyValue(self, Key):
Line = "%-30s = %s\n" % (Key, 'Enable')
elif self.Opt.NUM <= 1:
Line = "%-30s = %s\n" % (Key, 'Disable')
- elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM != None:
+ elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
- elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET != None:
+ elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
- elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH != None:
+ elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH is not None:
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET_ARCH))
- elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG != None:
+ elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG is not None:
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_CHAIN_TAG)
- elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE != None:
+ elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE is not None:
buildruleFullPath = os.path.join(self.WorkSpace, self.Opt.BUILD_RULE_FILE)
if os.path.exists(buildruleFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
@@ -223,7 +223,7 @@ def MyOptionParser():
if __name__ == '__main__':
EdkLogger.Initialize()
EdkLogger.SetLevel(EdkLogger.QUIET)
- if os.getenv('WORKSPACE') == None:
+ if os.getenv('WORKSPACE') is None:
print "ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool"
sys.exit(1)
@@ -231,15 +231,15 @@ if __name__ == '__main__':
if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
print "The number of args isn't 1 or the value of args is invalid."
sys.exit(1)
- if opt.NUM != None and opt.NUM < 1:
+ if opt.NUM is not None and opt.NUM < 1:
print "The MAX_CONCURRENT_THREAD_NUMBER must be larger than 0."
sys.exit(1)
- if opt.TARGET != None and len(opt.TARGET) > 1:
+ if opt.TARGET is not None and len(opt.TARGET) > 1:
for elem in opt.TARGET:
if elem == '0':
print "0 will clear the TARGET setting in target.txt and can't combine with other value."
sys.exit(1)
- if opt.TARGET_ARCH != None and len(opt.TARGET_ARCH) > 1:
+ if opt.TARGET_ARCH is not None and len(opt.TARGET_ARCH) > 1:
for elem in opt.TARGET_ARCH:
if elem == '0':
print "0 will clear the TARGET_ARCH setting in target.txt and can't combine with other value."
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py
index d1e40b025caa..d07edbd5d872 100644
--- a/BaseTools/Source/Python/Trim/Trim.py
+++ b/BaseTools/Source/Python/Trim/Trim.py
@@ -173,7 +173,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
continue
- if LineIndexOfOriginalFile == None:
+ if LineIndexOfOriginalFile is None:
#
# Any non-empty lines must be from original preprocessed file.
# And this must be the first one.
@@ -193,7 +193,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
# convert Decimal number format
Line = gDecNumberPattern.sub(r"\1", Line)
- if LineNumber != None:
+ if LineNumber is not None:
EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
# in case preprocessor removed some lines, like blank or comment lines
if LineNumber <= len(NewLines):
@@ -216,10 +216,10 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
Brace = 0
for Index in range(len(Lines)):
Line = Lines[Index]
- if MulPatternFlag == False and gTypedef_MulPattern.search(Line) == None:
- if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) == None:
+ if MulPatternFlag == False and gTypedef_MulPattern.search(Line) is None:
+ if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) is None:
# remove "#pragram pack" directive
- if gPragmaPattern.search(Line) == None:
+ if gPragmaPattern.search(Line) is None:
NewLines.append(Line)
continue
elif SinglePatternFlag == False:
@@ -282,9 +282,9 @@ def TrimPreprocessedVfr(Source, Target):
Lines[Index] = "\n"
continue
- if FoundTypedef == False and gTypedefPattern.search(Line) == None:
+ if FoundTypedef == False and gTypedefPattern.search(Line) is None:
# keep "#pragram pack" directive
- if gPragmaPattern.search(Line) == None:
+ if gPragmaPattern.search(Line) is None:
Lines[Index] = "\n"
continue
elif FoundTypedef == False:
@@ -510,7 +510,7 @@ def TrimEdkSources(Source, Target):
for FileName in Files:
Dummy, Ext = os.path.splitext(FileName)
if Ext.upper() not in ['.C', '.H']: continue
- if Target == None or Target == '':
+ if Target is None or Target == '':
TrimEdkSourceCode(
os.path.join(CurrentDir, FileName),
os.path.join(CurrentDir, FileName)
@@ -568,7 +568,7 @@ def TrimEdkSourceCode(Source, Target):
NewLines = None
for Re,Repl in gImportCodePatterns:
- if NewLines == None:
+ if NewLines is None:
NewLines = Re.sub(Repl, Lines)
else:
NewLines = Re.sub(Repl, NewLines)
@@ -672,11 +672,11 @@ def Main():
try:
if CommandOptions.FileType == "Vfr":
- if CommandOptions.OutputFile == None:
+ if CommandOptions.OutputFile is None:
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
TrimPreprocessedVfr(InputFile, CommandOptions.OutputFile)
elif CommandOptions.FileType == "Asl":
- if CommandOptions.OutputFile == None:
+ if CommandOptions.OutputFile is None:
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
TrimAslFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile)
elif CommandOptions.FileType == "EdkSourceCode":
@@ -684,13 +684,13 @@ def Main():
elif CommandOptions.FileType == "VfrOffsetBin":
GenerateVfrBinSec(CommandOptions.ModuleName, CommandOptions.DebugDir, CommandOptions.OutputFile)
else :
- if CommandOptions.OutputFile == None:
+ if CommandOptions.OutputFile is None:
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex, CommandOptions.TrimLong)
except FatalError, X:
import platform
import traceback
- if CommandOptions != None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
+ if CommandOptions is not None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
return 1
except:
diff --git a/BaseTools/Source/Python/UPT/Core/DependencyRules.py b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
index 26c5a97da80f..2af847ed2e0b 100644
--- a/BaseTools/Source/Python/UPT/Core/DependencyRules.py
+++ b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
@@ -104,12 +104,12 @@ class DependencyRules(object):
# check whether satisfied by current distribution
#
if not Exist:
- if DpObj == None:
+ if DpObj is None:
Result = False
break
for GuidVerPair in DpObj.PackageSurfaceArea.keys():
if Dep.GetGuid() == GuidVerPair[0]:
- if Dep.GetVersion() == None or \
+ if Dep.GetVersion() is None or \
len(Dep.GetVersion()) == 0:
Result = True
break
diff --git a/BaseTools/Source/Python/UPT/Core/IpiDb.py b/BaseTools/Source/Python/UPT/Core/IpiDb.py
index f147963288ad..78d67ab31e1e 100644
--- a/BaseTools/Source/Python/UPT/Core/IpiDb.py
+++ b/BaseTools/Source/Python/UPT/Core/IpiDb.py
@@ -247,13 +247,13 @@ class IpiDatabase(object):
def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
RePackage):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
#
# Add newly installed DP information to DB.
#
- if NewDpFileName == None or len(NewDpFileName.strip()) == 0:
+ if NewDpFileName is None or len(NewDpFileName.strip()) == 0:
PkgFileName = 'N/A'
else:
PkgFileName = NewDpFileName
@@ -295,13 +295,13 @@ class IpiDatabase(object):
#
def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
- if DpGuid == None or len(DpGuid.strip()) == 0:
+ if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
- if DpVersion == None or len(DpVersion.strip()) == 0:
+ if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
@@ -325,13 +325,13 @@ class IpiDatabase(object):
def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
PkgVersion=None, Path=''):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
- if PkgGuid == None or len(PkgGuid.strip()) == 0:
+ if PkgGuid is None or len(PkgGuid.strip()) == 0:
PkgGuid = 'N/A'
- if PkgVersion == None or len(PkgVersion.strip()) == 0:
+ if PkgVersion is None or len(PkgVersion.strip()) == 0:
PkgVersion = 'N/A'
if os.name == 'posix':
@@ -361,13 +361,13 @@ class IpiDatabase(object):
def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
DpVersion=None, Path=''):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
- if DpGuid == None or len(DpGuid.strip()) == 0:
+ if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
- if DpVersion == None or len(DpVersion.strip()) == 0:
+ if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
@@ -391,10 +391,10 @@ class IpiDatabase(object):
def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
DepexVersion=None):
- if DepexGuid == None or len(DepexGuid.strip()) == 0:
+ if DepexGuid is None or len(DepexGuid.strip()) == 0:
DepexGuid = 'N/A'
- if DepexVersion == None or len(DepexVersion.strip()) == 0:
+ if DepexVersion is None or len(DepexVersion.strip()) == 0:
DepexVersion = 'N/A'
if os.name == 'posix':
@@ -510,7 +510,7 @@ class IpiDatabase(object):
#
def GetDp(self, Guid, Version):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
(DpGuid, DpVersion) = (Guid, Version)
@@ -642,7 +642,7 @@ class IpiDatabase(object):
PackageVersion)
self.Cur.execute(SqlCommand)
- elif Version == None or len(Version.strip()) == 0:
+ elif Version is None or len(Version.strip()) == 0:
SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
(self.PkgTable, Guid)
diff --git a/BaseTools/Source/Python/UPT/Core/PackageFile.py b/BaseTools/Source/Python/UPT/Core/PackageFile.py
index 5fafd85bffbf..ec6f5503eaad 100644
--- a/BaseTools/Source/Python/UPT/Core/PackageFile.py
+++ b/BaseTools/Source/Python/UPT/Core/PackageFile.py
@@ -56,7 +56,7 @@ class PackageFile:
ExtraData="%s (%s)" % (FileName, str(Xstr)))
BadFile = self._ZipFile.testzip()
- if BadFile != None:
+ if BadFile is not None:
Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
ExtraData="[%s] in %s" % (BadFile, FileName))
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
index d7eaf3ea1d12..9373a144190d 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
@@ -618,11 +618,11 @@ def GenSourceStatement(SourceFile, Family, FeatureFlag, TagName=None,
# format of SourceFile|Family|TagName|ToolCode|FeatureFlag
#
Statement += SourceFile
- if TagName == None:
+ if TagName is None:
TagName = ''
- if ToolCode == None:
+ if ToolCode is None:
ToolCode = ''
- if HelpStr == None:
+ if HelpStr is None:
HelpStr = ''
if FeatureFlag:
Statement += '|' + Family + '|' + TagName + '|' + ToolCode + '|' + FeatureFlag
diff --git a/BaseTools/Source/Python/UPT/InstallPkg.py b/BaseTools/Source/Python/UPT/InstallPkg.py
index a8d0e1ec440a..c0d56b55aacd 100644
--- a/BaseTools/Source/Python/UPT/InstallPkg.py
+++ b/BaseTools/Source/Python/UPT/InstallPkg.py
@@ -91,7 +91,7 @@ def InstallNewPackage(WorkspaceDir, Path, CustomPath = False):
# @param PathList: The already installed standalone module Path list
#
def InstallNewModule(WorkspaceDir, Path, PathList = None):
- if PathList == None:
+ if PathList is None:
PathList = []
Path = ConvertPath(Path)
Path = os.path.normpath(Path)
diff --git a/BaseTools/Source/Python/UPT/Library/CommentParsing.py b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
index e6d45103f94b..38f7012fd4f8 100644
--- a/BaseTools/Source/Python/UPT/Library/CommentParsing.py
+++ b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
@@ -555,15 +555,15 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
# from HelpText
#
for Token in List[0:NumTokens]:
- if Usage == None and Token in UsageTokens:
+ if Usage is None and Token in UsageTokens:
Usage = UsageTokens[Token]
HelpText = HelpText.replace(Token, '')
- if Usage != None or not ParseVariable:
+ if Usage is not None or not ParseVariable:
for Token in List[0:NumTokens]:
- if Type == None and Token in TypeTokens:
+ if Type is None and Token in TypeTokens:
Type = TypeTokens[Token]
HelpText = HelpText.replace(Token, '')
- if Usage != None:
+ if Usage is not None:
for Token in List[0:NumTokens]:
if Token in RemoveTokens:
HelpText = HelpText.replace(Token, '')
@@ -571,13 +571,13 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
#
# If no Usage token is present and set Usage to UNDEFINED
#
- if Usage == None:
+ if Usage is None:
Usage = 'UNDEFINED'
#
# If no Type token is present and set Type to UNDEFINED
#
- if Type == None:
+ if Type is None:
Type = 'UNDEFINED'
#
diff --git a/BaseTools/Source/Python/UPT/Library/Misc.py b/BaseTools/Source/Python/UPT/Library/Misc.py
index 0d92cb3767c6..719445b3bd9a 100644
--- a/BaseTools/Source/Python/UPT/Library/Misc.py
+++ b/BaseTools/Source/Python/UPT/Library/Misc.py
@@ -120,7 +120,7 @@ def GuidStructureStringToGuidString(GuidValue):
# @param Directory: The directory name
#
def CreateDirectory(Directory):
- if Directory == None or Directory.strip() == "":
+ if Directory is None or Directory.strip() == "":
return True
try:
if not access(Directory, F_OK):
@@ -134,7 +134,7 @@ def CreateDirectory(Directory):
# @param Directory: The directory name
#
def RemoveDirectory(Directory, Recursively=False):
- if Directory == None or Directory.strip() == "" or not \
+ if Directory is None or Directory.strip() == "" or not \
os.path.exists(Directory):
return
if Recursively:
@@ -237,7 +237,7 @@ def GetNonMetaDataFiles(Root, SkipList, FullPath, PrefixPath):
#
def ValidFile(File, Ext=None):
File = File.replace('\\', '/')
- if Ext != None:
+ if Ext is not None:
FileExt = os.path.splitext(File)[1]
if FileExt.lower() != Ext.lower():
return False
@@ -423,7 +423,7 @@ class Sdict(IterableUserDict):
## update method
#
def update(self, Dict=None, **Kwargs):
- if Dict != None:
+ if Dict is not None:
for Key1, Val1 in Dict.items():
self[Key1] = Val1
if len(Kwargs):
@@ -529,7 +529,7 @@ class PathClass(object):
## _GetFileKey
#
def _GetFileKey(self):
- if self._Key == None:
+ if self._Key is None:
self._Key = self.Path.upper()
return self._Key
## Validate
diff --git a/BaseTools/Source/Python/UPT/Library/ParserValidate.py b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
index 028cf9a54f84..2def90a93b51 100644
--- a/BaseTools/Source/Python/UPT/Library/ParserValidate.py
+++ b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
@@ -128,7 +128,7 @@ def IsValidInfComponentType(ComponentType):
#
def IsValidToolFamily(ToolFamily):
ReIsValieFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
- if ReIsValieFamily.match(ToolFamily) == None:
+ if ReIsValieFamily.match(ToolFamily) is None:
return False
return True
@@ -159,7 +159,7 @@ def IsValidArch(Arch):
if Arch == 'common':
return True
ReIsValieArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
- if ReIsValieArch.match(Arch) == None:
+ if ReIsValieArch.match(Arch) is None:
return False
return True
@@ -179,7 +179,7 @@ def IsValidFamily(Family):
return True
ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
- if ReIsValidFamily.match(Family) == None:
+ if ReIsValidFamily.match(Family) is None:
return False
return True
@@ -199,13 +199,13 @@ def IsValidBuildOptionName(BuildOptionName):
ReIsValidBuildOption1 = re.compile(r"^\s*(\*)|([A-Z][a-zA-Z0-9]*)$")
ReIsValidBuildOption2 = re.compile(r"^\s*(\*)|([a-zA-Z][a-zA-Z0-9]*)$")
- if ReIsValidBuildOption1.match(ToolOptionList[0]) == None:
+ if ReIsValidBuildOption1.match(ToolOptionList[0]) is None:
return False
- if ReIsValidBuildOption1.match(ToolOptionList[1]) == None:
+ if ReIsValidBuildOption1.match(ToolOptionList[1]) is None:
return False
- if ReIsValidBuildOption2.match(ToolOptionList[2]) == None:
+ if ReIsValidBuildOption2.match(ToolOptionList[2]) is None:
return False
if ToolOptionList[3] == "*" and ToolOptionList[4] not in ['FAMILY', 'DLL', 'DPATH']:
@@ -442,7 +442,7 @@ def IsValidDecVersion(Word):
ReIsValidDecVersion = re.compile(r"[0-9]+\.?[0-9]+$")
else:
ReIsValidDecVersion = re.compile(r"[0-9]+$")
- if ReIsValidDecVersion.match(Word) == None:
+ if ReIsValidDecVersion.match(Word) is None:
return False
return True
@@ -457,7 +457,7 @@ def IsValidDecVersion(Word):
#
def IsValidHexVersion(Word):
ReIsValidHexVersion = re.compile(r"[0][xX][0-9A-Fa-f]{8}$", re.DOTALL)
- if ReIsValidHexVersion.match(Word) == None:
+ if ReIsValidHexVersion.match(Word) is None:
return False
return True
@@ -471,7 +471,7 @@ def IsValidHexVersion(Word):
#
def IsValidBuildNumber(Word):
ReIsValieBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
- if ReIsValieBuildNumber.match(Word) == None:
+ if ReIsValieBuildNumber.match(Word) is None:
return False
return True
@@ -488,7 +488,7 @@ def IsValidDepex(Word):
return IsValidCFormatGuid(Word[Index+4:].strip())
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
- if ReIsValidCName.match(Word) == None:
+ if ReIsValidCName.match(Word) is None:
return False
return True
@@ -585,11 +585,11 @@ def IsValidPcdValue(PcdValue):
return True
ReIsValidIntegerSingle = re.compile(r"^\s*[0-9]\s*$", re.DOTALL)
- if ReIsValidIntegerSingle.match(PcdValue) != None:
+ if ReIsValidIntegerSingle.match(PcdValue) is not None:
return True
ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
- if ReIsValidIntegerMulti.match(PcdValue) != None:
+ if ReIsValidIntegerMulti.match(PcdValue) is not None:
return True
#
@@ -654,7 +654,7 @@ def IsValidPcdValue(PcdValue):
#
def IsValidCVariableName(CName):
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
- if ReIsValidCName.match(CName) == None:
+ if ReIsValidCName.match(CName) is None:
return False
return True
@@ -669,7 +669,7 @@ def IsValidCVariableName(CName):
#
def IsValidIdentifier(Ident):
ReIdent = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
- if ReIdent.match(Ident) == None:
+ if ReIdent.match(Ident) is None:
return False
return True
@@ -683,7 +683,7 @@ def IsValidIdentifier(Ident):
def IsValidDecVersionVal(Ver):
ReVersion = re.compile(r"[0-9]+(\.[0-9]{1,2})$")
- if ReVersion.match(Ver) == None:
+ if ReVersion.match(Ver) is None:
return False
return True
diff --git a/BaseTools/Source/Python/UPT/Library/Parsing.py b/BaseTools/Source/Python/UPT/Library/Parsing.py
index c34e7751442a..791e064761c0 100644
--- a/BaseTools/Source/Python/UPT/Library/Parsing.py
+++ b/BaseTools/Source/Python/UPT/Library/Parsing.py
@@ -134,7 +134,7 @@ def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1):
Format = '<TokenSpaceGuidCName>.<PcdCName>'
- if TokenInfoString != '' and TokenInfoString != None:
+ if TokenInfoString != '' and TokenInfoString is not None:
TokenInfoList = GetSplitValueList(TokenInfoString, DataType.TAB_SPLIT)
if len(TokenInfoList) == 2:
return True
@@ -433,7 +433,7 @@ def GetComponents(Lines, KeyValues, CommentCharacter):
LineList = Lines.split('\n')
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
- if Line == None or Line == '':
+ if Line is None or Line == '':
continue
if FindBlock == False:
@@ -921,7 +921,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
FileLocalMacros[Name] = Value
ReIsValidMacroName = re.compile(r"^[A-Z][A-Z0-9_]*$", re.DOTALL)
- if ReIsValidMacroName.match(Name) == None:
+ if ReIsValidMacroName.match(Name) is None:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MACRONAME_INVALID % (Name),
@@ -940,7 +940,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
# <UnicodeString>, <CArray> are subset of <AsciiString>.
#
ReIsValidMacroValue = re.compile(r"^[\x20-\x7e]*$", re.DOTALL)
- if ReIsValidMacroValue.match(Value) == None:
+ if ReIsValidMacroValue.match(Value) is None:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MACROVALUE_INVALID % (Value),
@@ -979,7 +979,7 @@ def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
else:
Section = '[' + SectionName + ']'
Content += '\n' + Section + '\n'
- if StatementList != None:
+ if StatementList is not None:
for Statement in StatementList:
LineList = Statement.split('\n')
NewStatement = ""
diff --git a/BaseTools/Source/Python/UPT/Library/String.py b/BaseTools/Source/Python/UPT/Library/String.py
index 278073e4a379..b79891ea1417 100644
--- a/BaseTools/Source/Python/UPT/Library/String.py
+++ b/BaseTools/Source/Python/UPT/Library/String.py
@@ -166,7 +166,7 @@ def SplitModuleType(Key):
#
def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None, FileName=None, Flag=False):
LastString = String
- if MacroDefinitions == None:
+ if MacroDefinitions is None:
MacroDefinitions = {}
while MacroDefinitions:
QuotedStringList = []
@@ -244,7 +244,7 @@ def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None
#
def NormPath(Path, Defines=None):
IsRelativePath = False
- if Defines == None:
+ if Defines is None:
Defines = {}
if Path:
if Path[0] == '.':
@@ -524,7 +524,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
# to be checked
#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
(Root, Ext) = os.path.splitext(CheckFilename)
if Ext.upper() != ExtName.upper() and Root:
ContainerFile = open(ContainerFilename, 'r').read()
@@ -552,7 +552,7 @@ def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line,
#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
CheckFile = ''
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
if not os.path.isfile(CheckFile):
ContainerFile = open(ContainerFilename, 'r').read()
diff --git a/BaseTools/Source/Python/UPT/Library/UniClassObject.py b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
index 0014a7561ba8..66eefee9db31 100644
--- a/BaseTools/Source/Python/UPT/Library/UniClassObject.py
+++ b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
@@ -161,7 +161,7 @@ def GetLanguageCode1766(LangName, File=None):
for Key in gLANG_CONV_TABLE.keys():
if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
return Key
- if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None and LangName[3] == '-':
+ if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
for Key in gLANG_CONV_TABLE.keys():
if Key == LangName[0:3].lower():
return Key
@@ -186,7 +186,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if IsCompatibleMode:
if length == 3 and LangName.isalpha():
TempLangName = gLANG_CONV_TABLE.get(LangName.lower())
- if TempLangName != None:
+ if TempLangName is not None:
return TempLangName
return LangName
else:
@@ -200,7 +200,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if LangName.isalpha():
return LangName
elif length == 3:
- if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None:
+ if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None:
return LangName
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
@@ -208,7 +208,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
elif length >= 6:
if LangName[0:2].isalpha() and LangName[2] == '-':
return LangName
- if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None and LangName[3] == '-':
+ if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
return LangName
EdkLogger.Error("Unicode File Parser",
@@ -270,14 +270,14 @@ class StringDefClassObject(object):
self.UseOtherLangDef = UseOtherLangDef
self.Length = 0
- if Name != None:
+ if Name is not None:
self.StringName = Name
self.StringNameByteList = UniToHexList(Name)
- if Value != None:
+ if Value is not None:
self.StringValue = Value
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
- if Token != None:
+ if Token is not None:
self.Token = Token
def __str__(self):
@@ -288,7 +288,7 @@ class StringDefClassObject(object):
repr(self.UseOtherLangDef)
def UpdateValue(self, Value = None):
- if Value != None:
+ if Value is not None:
if self.StringValue:
self.StringValue = self.StringValue + '\r\n' + Value
else:
@@ -393,7 +393,7 @@ class UniFileClassObject(object):
# Check the string name is the upper character
if Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
+ if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
'The string token name %s in UNI file %s must be upper case character.' %(Name, self.File))
@@ -798,7 +798,7 @@ class UniFileClassObject(object):
# Load a .uni file
#
def LoadUniFile(self, File = None):
- if File == None:
+ if File is None:
EdkLogger.Error("Unicode File Parser",
ToolError.PARSER_ERROR,
Message='No unicode file is given',
@@ -901,7 +901,7 @@ class UniFileClassObject(object):
IsAdded = True
if Name in self.OrderedStringDict[Language]:
IsAdded = False
- if Value != None:
+ if Value is not None:
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
diff --git a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
index d7614b884990..f20ae4dfa82f 100644
--- a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
+++ b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
@@ -36,14 +36,14 @@ import Logger.Log as Logger
def CreateXmlElement(Name, String, NodeList, AttributeList):
Doc = xml.dom.minidom.Document()
Element = Doc.createElement(Name)
- if String != '' and String != None:
+ if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if type(Item) == type([]):
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Node = Doc.createElement(Key)
Node.appendChild(Doc.createTextNode(Value))
Element.appendChild(Node)
@@ -52,7 +52,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
for Item in AttributeList:
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
@@ -66,7 +66,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
# @param String A XPath style path.
#
def XmlList(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return []
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
@@ -101,7 +101,7 @@ def XmlList(Dom, String):
# @param String A XPath style path.
#
def XmlNode(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return None
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
diff --git a/BaseTools/Source/Python/UPT/Logger/Log.py b/BaseTools/Source/Python/UPT/Logger/Log.py
index 407a1b32b6ee..ae06a1ae2a50 100644
--- a/BaseTools/Source/Python/UPT/Logger/Log.py
+++ b/BaseTools/Source/Python/UPT/Logger/Log.py
@@ -134,7 +134,7 @@ def Debug(Level, Message, ExtraData=None):
"msg" : Message,
}
- if ExtraData != None:
+ if ExtraData is not None:
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict + "\n %s" % ExtraData
else:
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict
@@ -165,10 +165,10 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
#
# if no tool name given, use caller's source file name as tool name
#
- if ToolName == None or ToolName == "":
+ if ToolName is None or ToolName == "":
ToolName = os.path.basename(extract_stack()[-2][0])
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
@@ -180,12 +180,12 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
"msg" : Message,
}
- if File != None:
+ if File is not None:
LogText = _WARNING_MESSAGE_TEMPLATE % TemplateDict
else:
LogText = _WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
- if ExtraData != None:
+ if ExtraData is not None:
LogText += "\n %s" % ExtraData
_INFO_LOGGER.log(WARN, LogText)
@@ -215,18 +215,18 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
ExtraData=None, RaiseError=IS_RAISE_ERROR):
if ToolName:
pass
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
- if Message == None:
+ if Message is None:
if ErrorCode in gERROR_MESSAGE:
Message = gERROR_MESSAGE[ErrorCode]
else:
Message = gERROR_MESSAGE[UNKNOWN_ERROR]
- if ExtraData == None:
+ if ExtraData is None:
ExtraData = ""
TemplateDict = {
@@ -238,7 +238,7 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
"extra" : ExtraData
}
- if File != None:
+ if File is not None:
LogText = _ERROR_MESSAGE_TEMPLATE % TemplateDict
else:
LogText = __ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
diff --git a/BaseTools/Source/Python/UPT/MkPkg.py b/BaseTools/Source/Python/UPT/MkPkg.py
index 87c84f0cc25b..ff9aa7fb117c 100644
--- a/BaseTools/Source/Python/UPT/MkPkg.py
+++ b/BaseTools/Source/Python/UPT/MkPkg.py
@@ -73,7 +73,7 @@ def CheckForExistingDp(Path):
#
#
def Main(Options = None):
- if Options == None:
+ if Options is None:
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
try:
DataBase = GlobalData.gDB
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
index f968beee6081..33b142d64e07 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
@@ -271,7 +271,7 @@ class InfBinariesObject(InfSectionCommonDef):
#
pass
- if InfBianryVerItemObj != None:
+ if InfBianryVerItemObj is not None:
if self.Binaries.has_key((InfBianryVerItemObj)):
BinariesList = self.Binaries[InfBianryVerItemObj]
BinariesList.append((InfBianryVerItemObj, VerComment))
@@ -521,7 +521,7 @@ class InfBinariesObject(InfSectionCommonDef):
# #
# pass
- if InfBianryCommonItemObj != None:
+ if InfBianryCommonItemObj is not None:
if self.Binaries.has_key((InfBianryCommonItemObj)):
BinariesList = self.Binaries[InfBianryCommonItemObj]
BinariesList.append((InfBianryCommonItemObj, ItemComment))
@@ -538,11 +538,11 @@ class InfBinariesObject(InfSectionCommonDef):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
- if UiInf != None:
+ if UiInf is not None:
if len(UiInf) > 0:
#
# Check UI
@@ -672,7 +672,7 @@ class InfBinariesObject(InfSectionCommonDef):
# #
# pass
- if InfBianryUiItemObj != None:
+ if InfBianryUiItemObj is not None:
if self.Binaries.has_key((InfBianryUiItemObj)):
BinariesList = self.Binaries[InfBianryUiItemObj]
BinariesList.append((InfBianryUiItemObj, UiComment))
@@ -681,7 +681,7 @@ class InfBinariesObject(InfSectionCommonDef):
BinariesList = []
BinariesList.append((InfBianryUiItemObj, UiComment))
self.Binaries[InfBianryUiItemObj] = BinariesList
- if Ver != None and len(Ver) > 0:
+ if Ver is not None and len(Ver) > 0:
self.CheckVer(Ver, __SupArchList)
if CommonBinary and len(CommonBinary) > 0:
self.ParseCommonBinary(CommonBinary, __SupArchList)
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
index 1d074ee638fd..bbc797f65e37 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
@@ -62,7 +62,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciVendorId != None:
+ if self.PciVendorId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_VENDOR_ID),
LineInfo=self.CurrentLine)
return False
@@ -86,7 +86,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciDeviceId != None:
+ if self.PciDeviceId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_DEVICE_ID),
LineInfo=self.CurrentLine)
return False
@@ -110,7 +110,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciClassCode != None:
+ if self.PciClassCode is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_CLASS_CODE),
LineInfo=self.CurrentLine)
return False
@@ -135,7 +135,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciRevision != None:
+ if self.PciRevision is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_REVISION),
LineInfo=self.CurrentLine)
return False
@@ -159,7 +159,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciCompress != None:
+ if self.PciCompress is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_COMPRESS),
LineInfo=self.CurrentLine)
return False
@@ -215,11 +215,11 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.BaseName != None:
+ if self.BaseName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_BASE_NAME),
LineInfo=self.CurrentLine)
return False
- if not (BaseName == '' or BaseName == None):
+ if not (BaseName == '' or BaseName is None):
if IsValidWord(BaseName) and not BaseName.startswith("_"):
self.BaseName = InfDefMember()
self.BaseName.SetValue(BaseName)
@@ -243,7 +243,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.FileGuid != None:
+ if self.FileGuid is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_FILE_GUID),
LineInfo=self.CurrentLine)
@@ -274,7 +274,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.ModuleType != None:
+ if self.ModuleType is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_MODULE_TYPE),
LineInfo=self.CurrentLine)
@@ -309,7 +309,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
def SetModuleUniFileName(self, ModuleUniFileName, Comments):
if Comments:
pass
- if self.ModuleUniFileName != None:
+ if self.ModuleUniFileName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_MODULE_UNI_FILE),
LineInfo=self.CurrentLine)
self.ModuleUniFileName = ModuleUniFileName
@@ -327,7 +327,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.InfVersion != None:
+ if self.InfVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_INF_VERSION),
LineInfo=self.CurrentLine)
@@ -368,7 +368,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.EdkReleaseVersion != None:
+ if self.EdkReleaseVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION),
LineInfo=self.CurrentLine)
@@ -401,7 +401,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.UefiSpecificationVersion != None:
+ if self.UefiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
@@ -434,7 +434,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.PiSpecificationVersion != None:
+ if self.PiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
@@ -495,7 +495,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.VersionString != None:
+ if self.VersionString is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_VERSION_STRING),
LineInfo=self.CurrentLine)
@@ -517,7 +517,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.PcdIsDriver != None:
+ if self.PcdIsDriver is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PCD_IS_DRIVER),
LineInfo=self.CurrentLine)
@@ -710,7 +710,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.Shadow != None:
+ if self.Shadow is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_SHADOW),
LineInfo=self.CurrentLine)
return False
@@ -731,7 +731,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
# <CustomMake> ::= [<Family> "|"] <Filename>
#
def SetCustomMakefile(self, CustomMakefile, Comments):
- if not (CustomMakefile == '' or CustomMakefile == None):
+ if not (CustomMakefile == '' or CustomMakefile is None):
ValueList = GetSplitValueList(CustomMakefile)
if len(ValueList) == 1:
FileName = ValueList[0]
@@ -811,12 +811,12 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.UefiHiiResourceSection != None:
+ if self.UefiHiiResourceSection is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND
%(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION),
LineInfo=self.CurrentLine)
return False
- if not (UefiHiiResourceSection == '' or UefiHiiResourceSection == None):
+ if not (UefiHiiResourceSection == '' or UefiHiiResourceSection is None):
if (IsValidBoolType(UefiHiiResourceSection)):
self.UefiHiiResourceSection = InfDefMember()
self.UefiHiiResourceSection.SetValue(UefiHiiResourceSection)
@@ -948,7 +948,7 @@ class InfDefObject(InfSectionCommonDef):
RaiseError=True)
if Name == DT.TAB_INF_DEFINES_INF_VERSION:
HasFoundInfVersionFalg = True
- if not (Name == '' or Name == None):
+ if not (Name == '' or Name is None):
#
# Process "SPEC" Keyword definition.
#
@@ -971,7 +971,7 @@ class InfDefObject(InfSectionCommonDef):
LineInfo=LineInfo)
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
- if (ProcessFunc != None):
+ if (ProcessFunc is not None):
ProcessFunc(DefineList, Value, InfLineCommentObj)
self.Defines[ArchListString] = DefineList
else:
@@ -991,7 +991,7 @@ class InfDefObject(InfSectionCommonDef):
#
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
- if (ProcessFunc != None):
+ if (ProcessFunc is not None):
ProcessFunc(DefineList, Value, InfLineCommentObj)
self.Defines[ArchListString] = DefineList
#
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
index 23125552e06d..fb8d1f5a62ee 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
@@ -107,7 +107,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
#
# Get/Set Usage and HelpString
#
- if CommentsList != None and len(CommentsList) != 0 :
+ if CommentsList is not None and len(CommentsList) != 0 :
CommentInsList = []
PreUsage = None
PreGuidType = None
@@ -126,7 +126,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
[],
True)
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -236,7 +236,7 @@ class InfGuidObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupportArchList.append(ArchItem)
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
index 45fba31aaae9..dce75063dfb5 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
@@ -43,7 +43,7 @@ class InfHeaderObject():
# @param FileName: File Name
#
def SetFileName(self, FileName):
- if not (FileName == '' or FileName == None):
+ if not (FileName == '' or FileName is None):
self.FileName = FileName
return True
else:
@@ -59,7 +59,7 @@ class InfHeaderObject():
# @param Abstract: Abstract
#
def SetAbstract(self, Abstract):
- if not (Abstract == '' or Abstract == None):
+ if not (Abstract == '' or Abstract is None):
self.Abstract = Abstract
return True
else:
@@ -75,7 +75,7 @@ class InfHeaderObject():
# @param Description: Description content
#
def SetDescription(self, Description):
- if not (Description == '' or Description == None):
+ if not (Description == '' or Description is None):
self.Description = Description
return True
else:
@@ -91,7 +91,7 @@ class InfHeaderObject():
# @param Copyright: Copyright content
#
def SetCopyright(self, Copyright):
- if not (Copyright == '' or Copyright == None):
+ if not (Copyright == '' or Copyright is None):
self.Copyright = Copyright
return True
else:
@@ -107,7 +107,7 @@ class InfHeaderObject():
# @param License: License content
#
def SetLicense(self, License):
- if not (License == '' or License == None):
+ if not (License == '' or License is None):
self.License = License
return True
else:
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
index b18c4c381bc0..e588c6ba66d8 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
@@ -38,10 +38,10 @@ def GetArchModuleType(KeyList):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
- if (ModuleItem == '' or ModuleItem == None):
+ if (ModuleItem == '' or ModuleItem is None):
ModuleItem = 'COMMON'
if ArchItem not in __SupArchList:
@@ -136,7 +136,7 @@ class InfLibraryClassObject():
LibItemObj.CurrentLine.SetLineNo(LibItem[2][1])
LibItemObj.CurrentLine.SetLineString(LibItem[2][0])
LibItem = LibItem[0]
- if HelpStringObj != None:
+ if HelpStringObj is not None:
LibItemObj.SetHelpString(HelpStringObj)
if len(LibItem) >= 1:
if LibItem[0].strip() != '':
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py b/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
index 74099e208860..37f8cb2336bb 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
@@ -135,9 +135,9 @@ class InfSpecialCommentObject(InfSectionCommonDef):
# An encapsulate of Error for INF parser.
#
def ErrorInInf(Message=None, ErrorCode=None, LineInfo=None, RaiseError=True):
- if ErrorCode == None:
+ if ErrorCode is None:
ErrorCode = ToolError.FORMAT_INVALID
- if LineInfo == None:
+ if LineInfo is None:
LineInfo = ['', -1, '']
Logger.Error("InfParser",
ErrorCode,
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
index 37399134dbf3..01c854a8470e 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
@@ -75,7 +75,7 @@ class InfPackageObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
SupArchList.append(ArchItem)
@@ -84,7 +84,7 @@ class InfPackageObject():
HelpStringObj = PackageItem[1]
CurrentLineOfPackItem = PackageItem[2]
PackageItem = PackageItem[0]
- if HelpStringObj != None:
+ if HelpStringObj is not None:
HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
PackageItemObj.SetHelpString(HelpString)
if len(PackageItem) >= 1:
@@ -183,5 +183,5 @@ class InfPackageObject():
return True
def GetPackages(self, Arch = None):
- if Arch == None:
+ if Arch is None:
return self.Packages
\ No newline at end of file
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
index 7b07036f91c2..d2712a97f2ff 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
@@ -43,7 +43,7 @@ def ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
if PcdTypeItem1.upper != DT.TAB_INF_FEATURE_PCD.upper():
@@ -82,7 +82,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
if PcdTypeItem == 'FeaturePcd':
CommentItemUsage = DT.USAGE_ITEM_CONSUMES
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == 1:
@@ -96,7 +96,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
else:
continue
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentList) and CommentItemUsage == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -326,7 +326,7 @@ class InfPcdObject():
#
# Validate PcdType
#
- if (PcdTypeItem1 == '' or PcdTypeItem1 == None):
+ if (PcdTypeItem1 == '' or PcdTypeItem1 is None):
return False
else:
if not IsValidPcdType(PcdTypeItem1):
@@ -346,7 +346,7 @@ class InfPcdObject():
CurrentLineOfPcdItem = PcdItem[2]
PcdItem = PcdItem[0]
- if CommentList != None and len(CommentList) != 0:
+ if CommentList is not None and len(CommentList) != 0:
PcdItemObj = ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj)
else:
CommentItemIns = InfPcdItemCommentContent()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
index 4df62bb459ff..eb6b6927140b 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
@@ -51,7 +51,7 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
if CommentItemString:
pass
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -213,7 +213,7 @@ class InfPpiObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
@@ -290,7 +290,7 @@ class InfPpiObject():
#
# Get/Set Usage and HelpString for PPI entry
#
- if CommentsList != None and len(CommentsList) != 0:
+ if CommentsList is not None and len(CommentsList) != 0:
InfPpiItemObj = ParsePpiComment(CommentsList, InfPpiItemObj)
else:
CommentItemIns = InfPpiItemCommentContent()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
index c94e53c98f87..eb03095d6fec 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
@@ -49,7 +49,7 @@ def ParseProtocolComment(CommentsList, InfProtocolItemObj):
if CommentItemString:
pass
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -203,7 +203,7 @@ class InfProtocolObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
@@ -259,7 +259,7 @@ class InfProtocolObject():
#
# Get/Set Usage and HelpString for Protocol entry
#
- if CommentsList != None and len(CommentsList) != 0:
+ if CommentsList is not None and len(CommentsList) != 0:
InfProtocolItemObj = ParseProtocolComment(CommentsList, InfProtocolItemObj)
else:
CommentItemIns = InfProtocolItemCommentContent()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
index 9988f8ecfeed..2302dd5b9673 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
@@ -211,7 +211,7 @@ class InfSourcesObject(InfSectionCommonDef):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
diff --git a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
index 498f2d7634a5..4eed04c01765 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
@@ -155,7 +155,7 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
DT.MODEL_META_DATA_HEADER,
DefineSectionMacros)
- if Name != None:
+ if Name is not None:
DefineSectionMacros[Name] = Value
continue
@@ -168,7 +168,7 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
FileNameString,
DT.MODEL_META_DATA_PACKAGE,
DefineSectionMacros)
- if Name != None:
+ if Name is not None:
PackageSectionMacros[Name] = Value
continue
diff --git a/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
index f1d6943cbfff..f220402cb577 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
@@ -112,7 +112,7 @@ class InfBinarySectionParser(InfParserSectionRoot):
if BinLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = BinLineContent[BinLineContent.find(DT.TAB_COMMENT_SPLIT):]
BinLineContent = BinLineContent[:BinLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
@@ -123,7 +123,7 @@ class InfBinarySectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_BINARY_FILE,
self.FileLocalMacros)
- if MacroDef[0] != None:
+ if MacroDef[0] is not None:
SectionMacros[MacroDef[0]] = MacroDef[1]
LineComment = None
HeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
index d00087a128a0..f7749d55a062 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
@@ -133,7 +133,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
@@ -144,7 +144,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_META_DATA_HEADER,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
self.FileLocalMacros[Name] = Value
continue
@@ -173,7 +173,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
Name, Value = _ValueList[0], _ValueList[1]
InfDefMemberObj = InfDefMember(Name, Value)
- if (LineComment != None):
+ if (LineComment is not None):
InfDefMemberObj.Comments.SetHeaderComments(LineComment.GetHeaderComments())
InfDefMemberObj.Comments.SetTailComments(LineComment.GetTailComments())
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
index 5cafc80ca5c3..332e2f014310 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
@@ -87,7 +87,7 @@ class InfDepexSectionParser(InfParserSectionRoot):
ReFormatComment = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
for CommentItem in DepexComment:
CommentContent = CommentItem[0]
- if ReFormatComment.match(CommentContent) != None:
+ if ReFormatComment.match(CommentContent) is not None:
FormatCommentLn = CommentItem[1] + 1
continue
diff --git a/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
index 12ffedaaec61..956c116c6e79 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
@@ -77,7 +77,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_GUID,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
CommentsList = []
ValueList = []
@@ -164,7 +164,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_PPI,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
@@ -334,7 +334,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_PROTOCOL,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
index 210f973f1a4d..549e67f08d64 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
@@ -96,7 +96,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
if LibLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
LibTailComments = LibLineContent[LibLineContent.find(DT.TAB_COMMENT_SPLIT):]
LibLineContent = LibLineContent[:LibLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LibLineComment == None:
+ if LibLineComment is None:
LibLineComment = InfLineCommentObject()
LibLineComment.SetTailComments(LibTailComments)
@@ -107,7 +107,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_LIBRARY_CLASS,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
LibLineComment = None
LibHeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
index 67f1145322ad..8fb2898826e6 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
@@ -89,7 +89,7 @@ class InfPackageSectionParser(InfParserSectionRoot):
if PkgLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = PkgLineContent[PkgLineContent.find(DT.TAB_COMMENT_SPLIT):]
PkgLineContent = PkgLineContent[:PkgLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
#
@@ -99,7 +99,7 @@ class InfPackageSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_META_DATA_PACKAGE,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
LineComment = None
HeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParser.py b/BaseTools/Source/Python/UPT/Parser/InfParser.py
index e7bef2e35e0f..7bea49e0e861 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfParser.py
@@ -97,7 +97,7 @@ class InfParser(InfSectionParser):
#
# Load Inf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.ParseInfFile(Filename)
## Parse INF file
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
index a416897d27ae..6a335e8b6c75 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
@@ -73,9 +73,9 @@ gINF_SECTION_DEF = {
# @param Flag If the flag set to True, need to skip macros in a quoted string
#
def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Flag=False):
- if GlobalMacros == None:
+ if GlobalMacros is None:
GlobalMacros = {}
- if SectionMacros == None:
+ if SectionMacros is None:
SectionMacros = {}
FileName = LineInfo[0]
diff --git a/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
index f4324cc2ff1b..a9b87fdc0565 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
@@ -95,7 +95,7 @@ class InfPcdSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_PCD,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
index 51db7960353d..645c2c341460 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
@@ -86,7 +86,7 @@ class InfSourceSectionParser(InfParserSectionRoot):
if SrcLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = SrcLineContent[SrcLineContent.find(DT.TAB_COMMENT_SPLIT):]
SrcLineContent = SrcLineContent[:SrcLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
@@ -97,7 +97,7 @@ class InfSourceSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_SOURCE_FILE,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
LineComment = None
HeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
index a15173285345..e37a0b6c3be7 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
@@ -167,11 +167,11 @@ class InfPomAlignment(ModuleObject):
#
# Convert UEFI/PI version to decimal number
#
- if DefineObj.GetUefiSpecificationVersion() != None:
+ if DefineObj.GetUefiSpecificationVersion() is not None:
__UefiVersion = DefineObj.GetUefiSpecificationVersion().GetValue()
__UefiVersion = ConvertVersionToDecimal(__UefiVersion)
self.SetUefiSpecificationVersion(str(__UefiVersion))
- if DefineObj.GetPiSpecificationVersion() != None:
+ if DefineObj.GetPiSpecificationVersion() is not None:
__PiVersion = DefineObj.GetPiSpecificationVersion().GetValue()
__PiVersion = ConvertVersionToDecimal(__PiVersion)
@@ -186,7 +186,7 @@ class InfPomAlignment(ModuleObject):
# must exist items in INF define section
# MODULE_TYPE/BASE_NAME/INF_VERSION/FILE_GUID/VERSION_STRING
#
- if DefineObj.GetModuleType() == None:
+ if DefineObj.GetModuleType() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("MODULE_TYPE"), File=self.FullPath)
else:
@@ -205,7 +205,7 @@ class InfPomAlignment(ModuleObject):
Line=DefineObj.ModuleType.CurrentLine.LineNo,
ExtraData=DefineObj.ModuleType.CurrentLine.LineString)
self.LibModuleTypeList.append(ModuleType)
- if DefineObj.GetBaseName() == None:
+ if DefineObj.GetBaseName() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("BASE_NAME"), File=self.FullPath)
else:
@@ -214,17 +214,17 @@ class InfPomAlignment(ModuleObject):
self.UniFileClassObject = UniFileClassObject([PathClass(DefineObj.GetModuleUniFileName())])
else:
self.UniFileClassObject = None
- if DefineObj.GetInfVersion() == None:
+ if DefineObj.GetInfVersion() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("INF_VERSION"), File=self.FullPath)
else:
self.SetVersion(DefineObj.GetInfVersion().GetValue())
- if DefineObj.GetFileGuid() == None:
+ if DefineObj.GetFileGuid() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("FILE_GUID"), File=self.FullPath)
else:
self.SetGuid(DefineObj.GetFileGuid().GetValue())
- if DefineObj.GetVersionString() == None:
+ if DefineObj.GetVersionString() is None:
#
# VERSION_STRING is missing from the [Defines] section, tools must assume that the module's version is 0.
#
@@ -256,7 +256,7 @@ class InfPomAlignment(ModuleObject):
if not (ModuleTypeValue == 'SEC' or ModuleTypeValue == 'PEI_CORE' or ModuleTypeValue == 'PEIM'):
Logger.Error("InfParser", FORMAT_INVALID, ST.ERR_INF_PARSER_DEFINE_SHADOW_INVALID, File=self.FullPath)
- if DefineObj.GetPcdIsDriver() != None:
+ if DefineObj.GetPcdIsDriver() is not None:
self.SetPcdIsDriver(DefineObj.GetPcdIsDriver().GetValue())
#
# LIBRARY_CLASS
@@ -499,7 +499,7 @@ class InfPomAlignment(ModuleObject):
LibraryClass.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
LibraryClass.SetSupModuleList(Item.GetSupModuleList())
HelpStringObj = Item.GetHelpString()
- if HelpStringObj != None:
+ if HelpStringObj is not None:
CommentString = GetHelpStringByRemoveHashKey(HelpStringObj.HeaderComments +
HelpStringObj.TailComments)
HelpTextHeaderObj = CommonObject.TextObject()
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
index 042d4784c84c..cca70e564042 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
@@ -45,7 +45,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
CustomMakefile = DefineObj.GetCustomMakefile()
UefiHiiResourceSection = DefineObj.GetUefiHiiResourceSection()
- if EdkReleaseVersion != None:
+ if EdkReleaseVersion is not None:
Name = DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION
Value = EdkReleaseVersion.GetValue()
Statement = _GenInfDefineStateMent(EdkReleaseVersion.Comments.GetHeaderComments(),
@@ -54,7 +54,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
EdkReleaseVersion.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if Shadow != None:
+ if Shadow is not None:
Name = DT.TAB_INF_DEFINES_SHADOW
Value = Shadow.GetValue()
Statement = _GenInfDefineStateMent(Shadow.Comments.GetHeaderComments(),
@@ -63,7 +63,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
Shadow.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if DpxSource != None:
+ if DpxSource is not None:
Name = DT.TAB_INF_DEFINES_DPX_SOURCE
for DpxSourceItem in DpxSource:
Value = DpxSourceItem[0]
@@ -73,7 +73,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
DpxSourceItem[1].GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciVendorId != None:
+ if PciVendorId is not None:
Name = DT.TAB_INF_DEFINES_PCI_VENDOR_ID
Value = PciVendorId.GetValue()
Statement = _GenInfDefineStateMent(PciVendorId.Comments.GetHeaderComments(),
@@ -82,7 +82,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciVendorId.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciDeviceId != None:
+ if PciDeviceId is not None:
Name = DT.TAB_INF_DEFINES_PCI_DEVICE_ID
Value = PciDeviceId.GetValue()
Statement = _GenInfDefineStateMent(PciDeviceId.Comments.GetHeaderComments(),
@@ -91,7 +91,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciDeviceId.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciClassCode != None:
+ if PciClassCode is not None:
Name = DT.TAB_INF_DEFINES_PCI_CLASS_CODE
Value = PciClassCode.GetValue()
Statement = _GenInfDefineStateMent(PciClassCode.Comments.GetHeaderComments(),
@@ -100,7 +100,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciClassCode.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciRevision != None:
+ if PciRevision is not None:
Name = DT.TAB_INF_DEFINES_PCI_REVISION
Value = PciRevision.GetValue()
Statement = _GenInfDefineStateMent(PciRevision.Comments.GetHeaderComments(),
@@ -109,7 +109,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciRevision.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciCompress != None:
+ if PciCompress is not None:
Name = DT.TAB_INF_DEFINES_PCI_COMPRESS
Value = PciCompress.GetValue()
Statement = _GenInfDefineStateMent(PciCompress.Comments.GetHeaderComments(),
@@ -138,7 +138,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
DefinesDictNew[Statement] = ArchString
- if UefiHiiResourceSection != None:
+ if UefiHiiResourceSection is not None:
Name = DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION
Value = UefiHiiResourceSection.GetValue()
HeaderComment = UefiHiiResourceSection.Comments.GetHeaderComments()
diff --git a/BaseTools/Source/Python/UPT/UPT.py b/BaseTools/Source/Python/UPT/UPT.py
index 325b96bf560d..09653cdce95f 100644
--- a/BaseTools/Source/Python/UPT/UPT.py
+++ b/BaseTools/Source/Python/UPT/UPT.py
@@ -90,7 +90,7 @@ def SetLogLevel(Opt):
Logger.SetLevel(Logger.VERBOSE)
elif Opt.opt_quiet:
Logger.SetLevel(Logger.QUIET + 1)
- elif Opt.debug_level != None:
+ elif Opt.debug_level is not None:
if Opt.debug_level < 0 or Opt.debug_level > 9:
Logger.Warn("UPT", ST.ERR_DEBUG_LEVEL)
Logger.SetLevel(Logger.INFO)
diff --git a/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py b/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
index 4dc1c7edab3d..51ac48aca58e 100644
--- a/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
@@ -550,7 +550,7 @@ class ModulePropertyXml(object):
Hob = Axml.FromXml(SubItem, 'HOB')
self.HOBs.append(Hob)
- if Header == None:
+ if Header is None:
Header = ModuleObject()
Header.SetModuleType(self.ModuleType)
diff --git a/BaseTools/Source/Python/UPT/Xml/XmlParser.py b/BaseTools/Source/Python/UPT/Xml/XmlParser.py
index 58959081d0ab..dba3b7f5892c 100644
--- a/BaseTools/Source/Python/UPT/Xml/XmlParser.py
+++ b/BaseTools/Source/Python/UPT/Xml/XmlParser.py
@@ -162,7 +162,7 @@ class DistributionPackageXml(object):
def FromXml(self, Filename=None):
- if Filename != None:
+ if Filename is not None:
self.DistP = DistributionPackageClass()
#
# Load to XML
@@ -227,7 +227,7 @@ class DistributionPackageXml(object):
def ToXml(self, DistP):
if self.DistP:
pass
- if DistP != None:
+ if DistP is not None:
#
# Parse DistributionPackageHeader
#
@@ -344,7 +344,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['Guids']
for Item in Module.GetGuidList():
- if Item == None:
+ if Item is None:
CheckDict = {'GuidCName':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -369,7 +369,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['Protocols']
for Item in Module.GetProtocolList():
- if Item == None:
+ if Item is None:
CheckDict = {'Protocol':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -384,7 +384,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['PPIs']
for Item in Module.GetPpiList():
- if Item == None:
+ if Item is None:
CheckDict = {'Ppi':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -399,7 +399,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['PcdCoded']
for Item in Module.GetPcdList():
- if Item == None:
+ if Item is None:
CheckDict = {'PcdEntry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -416,7 +416,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['Externs']
for Item in Module.GetExternList():
- if Item == None:
+ if Item is None:
CheckDict = {'Extern':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -536,7 +536,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['LibraryClassDefinitions']
for Item in Module.GetLibraryClassList():
- if Item == None:
+ if Item is None:
CheckDict = {'LibraryClass':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -608,7 +608,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['SourceFiles']
for Item in Module.GetSourceFileList():
- if Item == None:
+ if Item is None:
CheckDict = {'Filename':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -636,7 +636,7 @@ def ValidateMS3(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['PackageDependencies']
for Item in Module.GetPackageDependencyList():
- if Item == None:
+ if Item is None:
CheckDict = {'Package':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -649,7 +649,7 @@ def ValidateMS3(Module, TopXmlTreeLevel):
# Check BinaryFiles -> BinaryFile
#
for Item in Module.GetBinaryFileList():
- if Item == None:
+ if Item is None:
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles']
CheckDict = {'BinaryFile':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -772,7 +772,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ClonedFrom']
for Item in Package.GetClonedFromList():
- if Item == None:
+ if Item is None:
CheckDict = Sdict()
CheckDict['GUID'] = ''
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -787,7 +787,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'LibraryClassDeclarations']
for Item in Package.GetLibraryClassList():
- if Item == None:
+ if Item is None:
CheckDict = {'LibraryClass':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -802,7 +802,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'IndustryStandardIncludes']
for Item in Package.GetStandardIncludeFileList():
- if Item == None:
+ if Item is None:
CheckDict = {'IndustryStandardHeader':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -816,7 +816,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PackageIncludes']
for Item in Package.GetPackageIncludeFileList():
- if Item == None:
+ if Item is None:
CheckDict = {'PackageHeader':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -842,7 +842,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'GuidDeclarations']
for Item in Package.GetGuidList():
- if Item == None:
+ if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -857,7 +857,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ProtocolDeclarations']
for Item in Package.GetProtocolList():
- if Item == None:
+ if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -872,7 +872,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PpiDeclarations']
for Item in Package.GetPpiList():
- if Item == None:
+ if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -887,7 +887,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PcdDeclarations']
for Item in Package.GetPcdList():
- if Item == None:
+ if Item is None:
CheckDict = {'PcdEntry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py
index a306dc0b2304..90c8246806d8 100644
--- a/BaseTools/Source/Python/Workspace/BuildClassObject.py
+++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py
@@ -212,7 +212,7 @@ class LibraryClassObject(object):
def __init__(self, Name = None, SupModList = [], Type = None):
self.LibraryClass = Name
self.SupModList = SupModList
- if Type != None:
+ if Type is not None:
self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
## ModuleBuildClassObject
diff --git a/BaseTools/Source/Python/Workspace/DecBuildData.py b/BaseTools/Source/Python/Workspace/DecBuildData.py
index 4d6edadc8f9d..49ef1df4aa76 100644
--- a/BaseTools/Source/Python/Workspace/DecBuildData.py
+++ b/BaseTools/Source/Python/Workspace/DecBuildData.py
@@ -107,7 +107,7 @@ class DecBuildData(PackageBuildClassObject):
## Get current effective macros
def _GetMacros(self):
- if self.__Macros == None:
+ if self.__Macros is None:
self.__Macros = {}
self.__Macros.update(GlobalData.gGlobalDefines)
return self.__Macros
@@ -145,34 +145,34 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve package name
def _GetPackageName(self):
- if self._PackageName == None:
- if self._Header == None:
+ if self._PackageName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._PackageName == None:
+ if self._PackageName is None:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
return self._PackageName
## Retrieve file guid
def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
+ if self._Guid is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Guid == None:
+ if self._Guid is None:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
return self._Guid
## Retrieve package version
def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
+ if self._Version is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Version == None:
+ if self._Version is None:
self._Version = ''
return self._Version
## Retrieve protocol definitions (name/value pairs)
def _GetProtocol(self):
- if self._Protocols == None:
+ if self._Protocols is None:
#
# tdict is a special kind of dict, used for selecting correct
# protocol defition for given ARCH
@@ -214,7 +214,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve PPI definitions (name/value pairs)
def _GetPpi(self):
- if self._Ppis == None:
+ if self._Ppis is None:
#
# tdict is a special kind of dict, used for selecting correct
# PPI defition for given ARCH
@@ -256,7 +256,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve GUID definitions (name/value pairs)
def _GetGuid(self):
- if self._Guids == None:
+ if self._Guids is None:
#
# tdict is a special kind of dict, used for selecting correct
# GUID defition for given ARCH
@@ -298,7 +298,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve public include paths declared in this package
def _GetInclude(self):
- if self._Includes == None or self._CommonIncludes is None:
+ if self._Includes is None or self._CommonIncludes is None:
self._CommonIncludes = []
self._Includes = []
self._PrivateIncludes = []
@@ -333,7 +333,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve library class declarations (not used in build at present)
def _GetLibraryClass(self):
- if self._LibraryClasses == None:
+ if self._LibraryClasses is None:
#
# tdict is a special kind of dict, used for selecting correct
# library class declaration for given ARCH
@@ -357,7 +357,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve PCD declarations
def _GetPcds(self):
- if self._Pcds == None:
+ if self._Pcds is None:
self._Pcds = sdict()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
@@ -422,7 +422,7 @@ class DecBuildData(PackageBuildClassObject):
# will automatically turn to 'common' ARCH and try again
#
Setting,LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)
@@ -454,7 +454,7 @@ class DecBuildData(PackageBuildClassObject):
StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_]*$')
for pcd in Pcds.values():
if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
- if StructPattern.match(pcd.DatumType) == None:
+ if StructPattern.match(pcd.DatumType) is None:
EdkLogger.error('build', FORMAT_INVALID, "DatumType only support BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", pcd.DefinitionPosition[0],pcd.DefinitionPosition[1])
for struct_pcd in Pcds.values():
if isinstance(struct_pcd,StructurePcd) and not struct_pcd.StructuredPcdIncludeFile:
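
Most of the _Get* accessors touched here and in DscBuildData.py/InfBuildData.py below follow the same lazy-initialization pattern: an attribute starts life as None, is filled in on first access, and may legitimately end up empty ('', [] or {}). None is the "not computed yet" sentinel, and an identity test is the precise way to probe for it -- a truthiness test would wrongly treat a cached empty value as unset, and an equality test can be fooled by a custom __eq__. A minimal sketch of the pattern, with hypothetical names rather than actual BaseTools code:

    class PackageInfo(object):
        # Illustrative cache-on-first-use accessor.
        def __init__(self, meta_file):
            self._MetaFile = meta_file
            self._Version = None              # None means "header not parsed yet"

        def _GetVersion(self):
            if self._Version is None:         # recompute only while unset
                self._Version = self._ParseVersionFromHeader() or ''
            return self._Version              # may be '' -- cached, not recomputed

        def _ParseVersionFromHeader(self):
            # Stand-in for the real INF/DEC header parsing.
            return ''
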
diff --git a/BaseTools/Source/Python/Workspace/DscBuildData.py b/BaseTools/Source/Python/Workspace/DscBuildData.py
index 1cc4dc966203..cf9608651269 100644
--- a/BaseTools/Source/Python/Workspace/DscBuildData.py
+++ b/BaseTools/Source/Python/Workspace/DscBuildData.py
@@ -298,7 +298,7 @@ class DscBuildData(PlatformBuildClassObject):
## Get current effective macros
def _GetMacros(self):
- if self.__Macros == None:
+ if self.__Macros is None:
self.__Macros = {}
self.__Macros.update(GlobalData.gPlatformDefines)
self.__Macros.update(GlobalData.gGlobalDefines)
@@ -368,7 +368,7 @@ class DscBuildData(PlatformBuildClassObject):
elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
self._BuildTargets = GetSplitValueList(Record[2])
elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
- if self._SkuName == None:
+ if self._SkuName is None:
self._SkuName = Record[2]
if GlobalData.gSKUID_CMD:
self._SkuName = GlobalData.gSKUID_CMD
@@ -427,76 +427,76 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve platform name
def _GetPlatformName(self):
- if self._PlatformName == None:
- if self._Header == None:
+ if self._PlatformName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._PlatformName == None:
+ if self._PlatformName is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
return self._PlatformName
## Retrieve file guid
def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
+ if self._Guid is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Guid == None:
+ if self._Guid is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_GUID", File=self.MetaFile)
return self._Guid
## Retrieve platform version
def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
+ if self._Version is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Version == None:
+ if self._Version is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_VERSION", File=self.MetaFile)
return self._Version
## Retrieve platform description file version
def _GetDscSpec(self):
- if self._DscSpecification == None:
- if self._Header == None:
+ if self._DscSpecification is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._DscSpecification == None:
+ if self._DscSpecification is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No DSC_SPECIFICATION", File=self.MetaFile)
return self._DscSpecification
## Retrieve OUTPUT_DIRECTORY
def _GetOutpuDir(self):
- if self._OutputDirectory == None:
- if self._Header == None:
+ if self._OutputDirectory is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._OutputDirectory == None:
+ if self._OutputDirectory is None:
self._OutputDirectory = os.path.join("Build", self._PlatformName)
return self._OutputDirectory
## Retrieve SUPPORTED_ARCHITECTURES
def _GetSupArch(self):
- if self._SupArchList == None:
- if self._Header == None:
+ if self._SupArchList is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._SupArchList == None:
+ if self._SupArchList is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No SUPPORTED_ARCHITECTURES", File=self.MetaFile)
return self._SupArchList
## Retrieve BUILD_TARGETS
def _GetBuildTarget(self):
- if self._BuildTargets == None:
- if self._Header == None:
+ if self._BuildTargets is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._BuildTargets == None:
+ if self._BuildTargets is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BUILD_TARGETS", File=self.MetaFile)
return self._BuildTargets
def _GetPcdInfoFlag(self):
- if self._PcdInfoFlag == None or self._PcdInfoFlag.upper() == 'FALSE':
+ if self._PcdInfoFlag is None or self._PcdInfoFlag.upper() == 'FALSE':
return False
elif self._PcdInfoFlag.upper() == 'TRUE':
return True
else:
return False
def _GetVarCheckFlag(self):
- if self._VarCheckFlag == None or self._VarCheckFlag.upper() == 'FALSE':
+ if self._VarCheckFlag is None or self._VarCheckFlag.upper() == 'FALSE':
return False
elif self._VarCheckFlag.upper() == 'TRUE':
return True
@@ -505,10 +505,10 @@ class DscBuildData(PlatformBuildClassObject):
# # Retrieve SKUID_IDENTIFIER
def _GetSkuName(self):
- if self._SkuName == None:
- if self._Header == None:
+ if self._SkuName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._SkuName == None:
+ if self._SkuName is None:
self._SkuName = 'DEFAULT'
return self._SkuName
@@ -517,72 +517,72 @@ class DscBuildData(PlatformBuildClassObject):
self._SkuName = Value
def _GetFdfFile(self):
- if self._FlashDefinition == None:
- if self._Header == None:
+ if self._FlashDefinition is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._FlashDefinition == None:
+ if self._FlashDefinition is None:
self._FlashDefinition = ''
return self._FlashDefinition
def _GetPrebuild(self):
- if self._Prebuild == None:
- if self._Header == None:
+ if self._Prebuild is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Prebuild == None:
+ if self._Prebuild is None:
self._Prebuild = ''
return self._Prebuild
def _GetPostbuild(self):
- if self._Postbuild == None:
- if self._Header == None:
+ if self._Postbuild is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Postbuild == None:
+ if self._Postbuild is None:
self._Postbuild = ''
return self._Postbuild
## Retrieve FLASH_DEFINITION
def _GetBuildNumber(self):
- if self._BuildNumber == None:
- if self._Header == None:
+ if self._BuildNumber is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._BuildNumber == None:
+ if self._BuildNumber is None:
self._BuildNumber = ''
return self._BuildNumber
## Retrieve MAKEFILE_NAME
def _GetMakefileName(self):
- if self._MakefileName == None:
- if self._Header == None:
+ if self._MakefileName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._MakefileName == None:
+ if self._MakefileName is None:
self._MakefileName = ''
return self._MakefileName
## Retrieve BsBaseAddress
def _GetBsBaseAddress(self):
- if self._BsBaseAddress == None:
- if self._Header == None:
+ if self._BsBaseAddress is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._BsBaseAddress == None:
+ if self._BsBaseAddress is None:
self._BsBaseAddress = ''
return self._BsBaseAddress
## Retrieve RtBaseAddress
def _GetRtBaseAddress(self):
- if self._RtBaseAddress == None:
- if self._Header == None:
+ if self._RtBaseAddress is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._RtBaseAddress == None:
+ if self._RtBaseAddress is None:
self._RtBaseAddress = ''
return self._RtBaseAddress
## Retrieve the top address for the load fix address
def _GetLoadFixAddress(self):
- if self._LoadFixAddress == None:
- if self._Header == None:
+ if self._LoadFixAddress is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._LoadFixAddress == None:
+ if self._LoadFixAddress is None:
self._LoadFixAddress = self._Macros.get(TAB_FIX_LOAD_TOP_MEMORY_ADDRESS, '0')
try:
@@ -608,33 +608,33 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve RFCLanguage filter
def _GetRFCLanguages(self):
- if self._RFCLanguages == None:
- if self._Header == None:
+ if self._RFCLanguages is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._RFCLanguages == None:
+ if self._RFCLanguages is None:
self._RFCLanguages = []
return self._RFCLanguages
## Retrieve ISOLanguage filter
def _GetISOLanguages(self):
- if self._ISOLanguages == None:
- if self._Header == None:
+ if self._ISOLanguages is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._ISOLanguages == None:
+ if self._ISOLanguages is None:
self._ISOLanguages = []
return self._ISOLanguages
## Retrieve the GUID string for VPD tool
def _GetVpdToolGuid(self):
- if self._VpdToolGuid == None:
- if self._Header == None:
+ if self._VpdToolGuid is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._VpdToolGuid == None:
+ if self._VpdToolGuid is None:
self._VpdToolGuid = ''
return self._VpdToolGuid
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
- if self._SkuIds == None:
+ if self._SkuIds is None:
self._SkuIds = sdict()
RecordList = self._RawData[MODEL_EFI_SKU_ID, self._Arch]
for Record in RecordList:
@@ -646,7 +646,7 @@ class DscBuildData(PlatformBuildClassObject):
File=self.MetaFile, Line=Record[-1])
Pattern = re.compile('^[1-9]\d*|0$')
HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')
- if Pattern.match(Record[0]) == None and HexPattern.match(Record[0]) == None:
+ if Pattern.match(Record[0]) is None and HexPattern.match(Record[0]) is None:
EdkLogger.error('build', FORMAT_INVALID, "The format of the Sku ID number is invalid. It only support Integer and HexNumber",
File=self.MetaFile, Line=Record[-1])
if not IsValidWord(Record[1]):
@@ -661,7 +661,7 @@ class DscBuildData(PlatformBuildClassObject):
def ToInt(self,intstr):
return int(intstr,16) if intstr.upper().startswith("0X") else int(intstr)
def _GetDefaultStores(self):
- if self.DefaultStores == None:
+ if self.DefaultStores is None:
self.DefaultStores = sdict()
RecordList = self._RawData[MODEL_EFI_DEFAULT_STORES, self._Arch]
for Record in RecordList:
@@ -673,7 +673,7 @@ class DscBuildData(PlatformBuildClassObject):
File=self.MetaFile, Line=Record[-1])
Pattern = re.compile('^[1-9]\d*|0$')
HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')
- if Pattern.match(Record[0]) == None and HexPattern.match(Record[0]) == None:
+ if Pattern.match(Record[0]) is None and HexPattern.match(Record[0]) is None:
EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID number is invalid. It only support Integer and HexNumber",
File=self.MetaFile, Line=Record[-1])
if not IsValidWord(Record[1]):
@@ -689,7 +689,7 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve [Components] section information
def _GetModules(self):
- if self._Modules != None:
+ if self._Modules is not None:
return self._Modules
self._Modules = sdict()
@@ -788,13 +788,13 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve all possible library instances used in this platform
def _GetLibraryInstances(self):
- if self._LibraryInstances == None:
+ if self._LibraryInstances is None:
self._GetLibraryClasses()
return self._LibraryInstances
## Retrieve [LibraryClasses] information
def _GetLibraryClasses(self):
- if self._LibraryClasses == None:
+ if self._LibraryClasses is None:
self._LibraryInstances = []
#
# tdict is a special dict kind of type, used for selecting correct
@@ -832,7 +832,7 @@ class DscBuildData(PlatformBuildClassObject):
# try all possible module types
for ModuleType in SUP_MODULE_LIST:
LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass]
- if LibraryInstance == None:
+ if LibraryInstance is None:
continue
self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance
@@ -859,7 +859,7 @@ class DscBuildData(PlatformBuildClassObject):
return self._LibraryClasses
def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
- if self._DecPcds == None:
+ if self._DecPcds is None:
FdfInfList = []
if GlobalData.gFdfParser:
@@ -1121,7 +1121,7 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve all PCD settings in platform
def _GetPcds(self):
- if self._Pcds == None:
+ if self._Pcds is None:
self._Pcds = sdict()
self.__ParsePcdFromCommandLine()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
@@ -1156,7 +1156,7 @@ class DscBuildData(PlatformBuildClassObject):
print "PcdCName: %s, SkuName: %s, Value: %s" % (".".join((pcdobj.TokenSpaceGuidCName, pcdobj.TokenCName)), skuid,str(pcdobj.SkuInfoList[skuid].DefaultValue))
## Retrieve [BuildOptions]
def _GetBuildOptions(self):
- if self._BuildOptions == None:
+ if self._BuildOptions is None:
self._BuildOptions = sdict()
#
# Retrieve build option for EDKII and EDK style module
@@ -1178,7 +1178,7 @@ class DscBuildData(PlatformBuildClassObject):
return self._BuildOptions
def GetBuildOptionsByModuleType(self, Edk, ModuleType):
- if self._ModuleTypeOptions == None:
+ if self._ModuleTypeOptions is None:
self._ModuleTypeOptions = sdict()
if (Edk, ModuleType) not in self._ModuleTypeOptions:
options = sdict()
@@ -1471,7 +1471,7 @@ class DscBuildData(PlatformBuildClassObject):
for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdSet:
Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid, SkuName]
- if Setting == None:
+ if Setting is None:
continue
PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
if (PcdCName, TokenSpaceGuid) in PcdValueDict:
@@ -2256,7 +2256,7 @@ class DscBuildData(PlatformBuildClassObject):
for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
@@ -2428,7 +2428,7 @@ class DscBuildData(PlatformBuildClassObject):
for PcdCName, TokenSpaceGuid, SkuName,DefaultStore, Dummy4 in PcdSet:
Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid,DefaultStore]
- if Setting == None:
+ if Setting is None:
continue
VariableName, VariableGuid, VariableOffset, DefaultValue, VarAttribute = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
@@ -2498,7 +2498,7 @@ class DscBuildData(PlatformBuildClassObject):
pcd.DatumType = pcdDecObject.DatumType
# Only fix the value while no value provided in DSC file.
for sku in pcd.SkuInfoList.values():
- if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue == None):
+ if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue is None):
sku.HiiDefaultValue = pcdDecObject.DefaultValue
for default_store in sku.DefaultStoreDict:
sku.DefaultStoreDict[default_store]=pcdDecObject.DefaultValue
@@ -2582,7 +2582,7 @@ class DscBuildData(PlatformBuildClassObject):
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
#
# For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
@@ -2691,7 +2691,7 @@ class DscBuildData(PlatformBuildClassObject):
self.Pcds[Name, Guid].DefaultValue = Value
@property
def DecPcds(self):
- if self._DecPcds == None:
+ if self._DecPcds is None:
FdfInfList = []
if GlobalData.gFdfParser:
FdfInfList = GlobalData.gFdfParser.Profile.InfList
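
Several hunks in this file validate SKU and DefaultStores IDs by testing the result of a compiled pattern's .match() against None. The re match methods return either a match object or None, never a boolean, so an identity check is the natural test (truthiness, as in "if not m:", is an equally idiomatic alternative when the match object itself is unused). A short sketch using hypothetical, anchored variants of those patterns:

    import re

    DecimalPattern = re.compile(r'(?:[1-9]\d*|0)$')
    HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')

    def IsValidId(token):
        # .match() yields a match object on success and None on failure.
        return not (DecimalPattern.match(token) is None and
                    HexPattern.match(token) is None)

    print IsValidId('0x1F')   # True
    print IsValidId('007')    # False -- leading zero, not a plain decimal
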
diff --git a/BaseTools/Source/Python/Workspace/InfBuildData.py b/BaseTools/Source/Python/Workspace/InfBuildData.py
index 7ea9b56d5dec..ded8f610c9c1 100644
--- a/BaseTools/Source/Python/Workspace/InfBuildData.py
+++ b/BaseTools/Source/Python/Workspace/InfBuildData.py
@@ -179,7 +179,7 @@ class InfBuildData(ModuleBuildClassObject):
## Get current effective macros
def _GetMacros(self):
- if self.__Macros == None:
+ if self.__Macros is None:
self.__Macros = {}
# EDK_GLOBAL defined macros can be applied to EDK module
if self.AutoGenVersion < 0x00010005:
@@ -246,7 +246,7 @@ class InfBuildData(ModuleBuildClassObject):
# items defined _PROPERTY_ don't need additional processing
if Name in self:
self[Name] = Value
- if self._Defs == None:
+ if self._Defs is None:
self._Defs = sdict()
self._Defs[Name] = Value
self._Macros[Name] = Value
@@ -254,15 +254,15 @@ class InfBuildData(ModuleBuildClassObject):
elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
Name = 'UEFI_SPECIFICATION_VERSION'
- if self._Specification == None:
+ if self._Specification is None:
self._Specification = sdict()
self._Specification[Name] = GetHexVerValue(Value)
- if self._Specification[Name] == None:
+ if self._Specification[Name] is None:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
"'%s' format is not supported for %s" % (Value, Name),
File=self.MetaFile, Line=Record[-1])
elif Name == 'LIBRARY_CLASS':
- if self._LibraryClass == None:
+ if self._LibraryClass is None:
self._LibraryClass = []
ValueList = GetSplitValueList(Value)
LibraryClass = ValueList[0]
@@ -272,30 +272,30 @@ class InfBuildData(ModuleBuildClassObject):
SupModuleList = SUP_MODULE_LIST
self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList))
elif Name == 'ENTRY_POINT':
- if self._ModuleEntryPointList == None:
+ if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
self._ModuleEntryPointList.append(Value)
elif Name == 'UNLOAD_IMAGE':
- if self._ModuleUnloadImageList == None:
+ if self._ModuleUnloadImageList is None:
self._ModuleUnloadImageList = []
if not Value:
continue
self._ModuleUnloadImageList.append(Value)
elif Name == 'CONSTRUCTOR':
- if self._ConstructorList == None:
+ if self._ConstructorList is None:
self._ConstructorList = []
if not Value:
continue
self._ConstructorList.append(Value)
elif Name == 'DESTRUCTOR':
- if self._DestructorList == None:
+ if self._DestructorList is None:
self._DestructorList = []
if not Value:
continue
self._DestructorList.append(Value)
elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
TokenList = GetSplitValueList(Value)
- if self._CustomMakefile == None:
+ if self._CustomMakefile is None:
self._CustomMakefile = {}
if len(TokenList) < 2:
self._CustomMakefile['MSFT'] = TokenList[0]
@@ -307,7 +307,7 @@ class InfBuildData(ModuleBuildClassObject):
File=self.MetaFile, Line=Record[-1])
self._CustomMakefile[TokenList[0]] = TokenList[1]
else:
- if self._Defs == None:
+ if self._Defs is None:
self._Defs = sdict()
self._Defs[Name] = Value
self._Macros[Name] = Value
@@ -329,10 +329,10 @@ class InfBuildData(ModuleBuildClassObject):
EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
"MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
File=self.MetaFile, Line=LineNo)
- if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
+ if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
if self._ModuleType == SUP_MODULE_SMM_CORE:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)
- if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x00010032):
+ if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x00010032):
if self._ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.MetaFile)
if self._ModuleType == SUP_MODULE_MM_STANDALONE:
@@ -357,7 +357,7 @@ class InfBuildData(ModuleBuildClassObject):
if ErrorCode != 0:
EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
File=self.MetaFile, Line=LineNo)
- if self.Sources == None:
+ if self.Sources is None:
self._Sources = []
self._Sources.append(File)
else:
@@ -377,7 +377,7 @@ class InfBuildData(ModuleBuildClassObject):
for Name, Value, Dummy, Arch, Platform, ID, LineNo in RecordList:
Value = ReplaceMacro(Value, Macros, True)
if Name == "IMAGE_ENTRY_POINT":
- if self._ModuleEntryPointList == None:
+ if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
self._ModuleEntryPointList.append(Value)
elif Name == "DPX_SOURCE":
@@ -387,7 +387,7 @@ class InfBuildData(ModuleBuildClassObject):
if ErrorCode != 0:
EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
File=self.MetaFile, Line=LineNo)
- if self.Sources == None:
+ if self.Sources is None:
self._Sources = []
self._Sources.append(File)
else:
@@ -397,7 +397,7 @@ class InfBuildData(ModuleBuildClassObject):
# EdkLogger.warn("build", "Don't know how to do with macro [%s]" % Name,
# File=self.MetaFile, Line=LineNo)
else:
- if self._BuildOptions == None:
+ if self._BuildOptions is None:
self._BuildOptions = sdict()
if ToolList[0] in self._TOOL_CODE_:
@@ -424,7 +424,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve file version
def _GetInfVersion(self):
- if self._AutoGenVersion == None:
+ if self._AutoGenVersion is None:
RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
for Record in RecordList:
if Record[1] == TAB_INF_DEFINES_INF_VERSION:
@@ -436,34 +436,34 @@ class InfBuildData(ModuleBuildClassObject):
else:
self._AutoGenVersion = int(Record[2], 0)
break
- if self._AutoGenVersion == None:
+ if self._AutoGenVersion is None:
self._AutoGenVersion = 0x00010000
return self._AutoGenVersion
## Retrieve BASE_NAME
def _GetBaseName(self):
- if self._BaseName == None:
- if self._Header_ == None:
+ if self._BaseName is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._BaseName == None:
+ if self._BaseName is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BASE_NAME name", File=self.MetaFile)
return self._BaseName
## Retrieve DxsFile
def _GetDxsFile(self):
- if self._DxsFile == None:
- if self._Header_ == None:
+ if self._DxsFile is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._DxsFile == None:
+ if self._DxsFile is None:
self._DxsFile = ''
return self._DxsFile
## Retrieve MODULE_TYPE
def _GetModuleType(self):
- if self._ModuleType == None:
- if self._Header_ == None:
+ if self._ModuleType is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ModuleType == None:
+ if self._ModuleType is None:
self._ModuleType = 'BASE'
if self._ModuleType not in SUP_MODULE_LIST:
self._ModuleType = "USER_DEFINED"
@@ -471,17 +471,17 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve COMPONENT_TYPE
def _GetComponentType(self):
- if self._ComponentType == None:
- if self._Header_ == None:
+ if self._ComponentType is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ComponentType == None:
+ if self._ComponentType is None:
self._ComponentType = 'USER_DEFINED'
return self._ComponentType
## Retrieve "BUILD_TYPE"
def _GetBuildType(self):
- if self._BuildType == None:
- if self._Header_ == None:
+ if self._BuildType is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
if not self._BuildType:
self._BuildType = "BASE"
@@ -489,37 +489,37 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve file guid
def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header_ == None:
+ if self._Guid is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Guid == None:
+ if self._Guid is None:
self._Guid = '00000000-0000-0000-0000-000000000000'
return self._Guid
## Retrieve module version
def _GetVersion(self):
- if self._Version == None:
- if self._Header_ == None:
+ if self._Version is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Version == None:
+ if self._Version is None:
self._Version = '0.0'
return self._Version
## Retrieve PCD_IS_DRIVER
def _GetPcdIsDriver(self):
- if self._PcdIsDriver == None:
- if self._Header_ == None:
+ if self._PcdIsDriver is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._PcdIsDriver == None:
+ if self._PcdIsDriver is None:
self._PcdIsDriver = ''
return self._PcdIsDriver
## Retrieve SHADOW
def _GetShadow(self):
- if self._Shadow == None:
- if self._Header_ == None:
+ if self._Shadow is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Shadow != None and self._Shadow.upper() == 'TRUE':
+ if self._Shadow is not None and self._Shadow.upper() == 'TRUE':
self._Shadow = True
else:
self._Shadow = False
@@ -527,79 +527,79 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve CUSTOM_MAKEFILE
def _GetMakefile(self):
- if self._CustomMakefile == None:
- if self._Header_ == None:
+ if self._CustomMakefile is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._CustomMakefile == None:
+ if self._CustomMakefile is None:
self._CustomMakefile = {}
return self._CustomMakefile
## Retrieve EFI_SPECIFICATION_VERSION
def _GetSpec(self):
- if self._Specification == None:
- if self._Header_ == None:
+ if self._Specification is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Specification == None:
+ if self._Specification is None:
self._Specification = {}
return self._Specification
## Retrieve LIBRARY_CLASS
def _GetLibraryClass(self):
- if self._LibraryClass == None:
- if self._Header_ == None:
+ if self._LibraryClass is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._LibraryClass == None:
+ if self._LibraryClass is None:
self._LibraryClass = []
return self._LibraryClass
## Retrieve ENTRY_POINT
def _GetEntryPoint(self):
- if self._ModuleEntryPointList == None:
- if self._Header_ == None:
+ if self._ModuleEntryPointList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ModuleEntryPointList == None:
+ if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
return self._ModuleEntryPointList
## Retrieve UNLOAD_IMAGE
def _GetUnloadImage(self):
- if self._ModuleUnloadImageList == None:
- if self._Header_ == None:
+ if self._ModuleUnloadImageList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ModuleUnloadImageList == None:
+ if self._ModuleUnloadImageList is None:
self._ModuleUnloadImageList = []
return self._ModuleUnloadImageList
## Retrieve CONSTRUCTOR
def _GetConstructor(self):
- if self._ConstructorList == None:
- if self._Header_ == None:
+ if self._ConstructorList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ConstructorList == None:
+ if self._ConstructorList is None:
self._ConstructorList = []
return self._ConstructorList
## Retrieve DESTRUCTOR
def _GetDestructor(self):
- if self._DestructorList == None:
- if self._Header_ == None:
+ if self._DestructorList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._DestructorList == None:
+ if self._DestructorList is None:
self._DestructorList = []
return self._DestructorList
## Retrieve definies other than above ones
def _GetDefines(self):
- if self._Defs == None:
- if self._Header_ == None:
+ if self._Defs is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Defs == None:
+ if self._Defs is None:
self._Defs = sdict()
return self._Defs
## Retrieve binary files
def _GetBinaries(self):
- if self._Binaries == None:
+ if self._Binaries is None:
self._Binaries = []
RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform]
Macros = self._Macros
@@ -646,7 +646,7 @@ class InfBuildData(ModuleBuildClassObject):
self._Sources = []
return self._Sources
- if self._Sources == None:
+ if self._Sources is None:
self._Sources = []
RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
Macros = self._Macros
@@ -687,7 +687,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve library classes employed by this module
def _GetLibraryClassUses(self):
- if self._LibraryClasses == None:
+ if self._LibraryClasses is None:
self._LibraryClasses = sdict()
RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
for Record in RecordList:
@@ -700,7 +700,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve library names (for Edk.x style of modules)
def _GetLibraryNames(self):
- if self._Libraries == None:
+ if self._Libraries is None:
self._Libraries = []
RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
for Record in RecordList:
@@ -716,14 +716,14 @@ class InfBuildData(ModuleBuildClassObject):
return self._ProtocolComments
## Retrieve protocols consumed/produced by this module
def _GetProtocols(self):
- if self._Protocols == None:
+ if self._Protocols is None:
self._Protocols = sdict()
self._ProtocolComments = sdict()
RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
for Record in RecordList:
CName = Record[0]
Value = ProtocolValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of Protocol [%s] is not found under [Protocols] section in" % CName,
@@ -741,14 +741,14 @@ class InfBuildData(ModuleBuildClassObject):
return self._PpiComments
## Retrieve PPIs consumed/produced by this module
def _GetPpis(self):
- if self._Ppis == None:
+ if self._Ppis is None:
self._Ppis = sdict()
self._PpiComments = sdict()
RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
for Record in RecordList:
CName = Record[0]
Value = PpiValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of PPI [%s] is not found under [Ppis] section in " % CName,
@@ -766,14 +766,14 @@ class InfBuildData(ModuleBuildClassObject):
return self._GuidComments
## Retrieve GUIDs consumed/produced by this module
def _GetGuids(self):
- if self._Guids == None:
+ if self._Guids is None:
self._Guids = sdict()
self._GuidComments = sdict()
RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
for Record in RecordList:
CName = Record[0]
Value = GuidValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of Guid [%s] is not found under [Guids] section in" % CName,
@@ -788,7 +788,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve include paths necessary for this module (for Edk.x style of modules)
def _GetIncludes(self):
- if self._Includes == None:
+ if self._Includes is None:
self._Includes = []
if self._SourceOverridePath:
self._Includes.append(self._SourceOverridePath)
@@ -845,7 +845,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve packages this module depends on
def _GetPackages(self):
- if self._Packages == None:
+ if self._Packages is None:
self._Packages = []
RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform]
Macros = self._Macros
@@ -868,7 +868,7 @@ class InfBuildData(ModuleBuildClassObject):
return self._PcdComments
## Retrieve PCDs used in this module
def _GetPcds(self):
- if self._Pcds == None:
+ if self._Pcds is None:
self._Pcds = sdict()
self._PcdComments = sdict()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
@@ -880,7 +880,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve build options specific to this module
def _GetBuildOptions(self):
- if self._BuildOptions == None:
+ if self._BuildOptions is None:
self._BuildOptions = sdict()
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform]
for Record in RecordList:
@@ -897,13 +897,13 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve dependency expression
def _GetDepex(self):
- if self._Depex == None:
+ if self._Depex is None:
self._Depex = tdict(False, 2)
RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
# If the module has only Binaries and no Sources, then ignore [Depex]
- if self.Sources == None or self.Sources == []:
- if self.Binaries != None and self.Binaries != []:
+ if self.Sources is None or self.Sources == []:
+ if self.Binaries is not None and self.Binaries != []:
return self._Depex
# PEIM and DXE drivers must have a valid [Depex] section
@@ -935,18 +935,18 @@ class InfBuildData(ModuleBuildClassObject):
elif Token.endswith(".inf"): # module file name
ModuleFile = os.path.normpath(Token)
Module = self.BuildDatabase[ModuleFile]
- if Module == None:
+ if Module is None:
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "Module is not found in active platform",
ExtraData=Token, File=self.MetaFile, Line=Record[-1])
DepexList.append(Module.Guid)
else:
# get the GUID value now
Value = ProtocolValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
Value = PpiValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
Value = GuidValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of [%s] is not found in" % Token,
@@ -958,7 +958,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve depedency expression
def _GetDepexExpression(self):
- if self._DepexExpression == None:
+ if self._DepexExpression is None:
self._DepexExpression = tdict(False, 2)
RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
DepexExpression = sdict()
@@ -989,7 +989,7 @@ class InfBuildData(ModuleBuildClassObject):
# get the guid value
if TokenSpaceGuid not in self.Guids:
Value = GuidValue(TokenSpaceGuid, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid,
@@ -1006,7 +1006,7 @@ class InfBuildData(ModuleBuildClassObject):
for PcdCName, TokenSpaceGuid in PcdList:
PcdRealName = PcdCName
Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
ValueList = AnalyzePcdData(Setting)
DefaultValue = ValueList[0]
@@ -1095,7 +1095,7 @@ class InfBuildData(ModuleBuildClassObject):
#
# Check whether the token value exist or not.
#
- if Pcd.TokenValue == None or Pcd.TokenValue == "":
+ if Pcd.TokenValue is None or Pcd.TokenValue == "":
EdkLogger.error(
'build',
FORMAT_INVALID,
@@ -1108,7 +1108,7 @@ class InfBuildData(ModuleBuildClassObject):
#
ReIsValidPcdTokenValue = re.compile(r"^[0][x|X][0]*[0-9a-fA-F]{1,8}$", re.DOTALL)
if Pcd.TokenValue.startswith("0x") or Pcd.TokenValue.startswith("0X"):
- if ReIsValidPcdTokenValue.match(Pcd.TokenValue) == None:
+ if ReIsValidPcdTokenValue.match(Pcd.TokenValue) is None:
EdkLogger.error(
'build',
FORMAT_INVALID,
diff --git a/BaseTools/Source/Python/Workspace/MetaDataTable.py b/BaseTools/Source/Python/Workspace/MetaDataTable.py
index ee4ba6869f80..0cfec9023261 100644
--- a/BaseTools/Source/Python/Workspace/MetaDataTable.py
+++ b/BaseTools/Source/Python/Workspace/MetaDataTable.py
@@ -113,7 +113,7 @@ class Table(object):
SqlCommand = """select max(ID) from %s""" % self.Table
Record = self.Cur.execute(SqlCommand).fetchall()
Id = Record[0][0]
- if Id == None:
+ if Id is None:
Id = self.IdBase
return Id
@@ -311,7 +311,7 @@ class TableDataModel(Table):
def InitTable(self):
EdkLogger.verbose("\nInitialize table DataModel started ...")
Count = self.GetCount()
- if Count != None and Count != 0:
+ if Count is not None and Count != 0:
return
for Item in DataClass.MODEL_LIST:
CrossIndex = Item[1]
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 2eb4c75e4658..f4c1868483d9 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -241,7 +241,7 @@ class MetaFileParser(object):
self.Start()
# No specific ARCH or Platform given, use raw data
- if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch)
# Do post-process if necessary
@@ -620,7 +620,7 @@ class InfParser(MetaFileParser):
self._ValueList = ['', '', '']
# parse current line, result will be put in self._ValueList
self._SectionParser[self._SectionType](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
Comments = []
continue
@@ -952,7 +952,7 @@ class DscParser(MetaFileParser):
self._ValueList = ['', '', '']
self._SectionParser[SectionType](self)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
#
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
@@ -1361,7 +1361,7 @@ class DscParser(MetaFileParser):
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
NewOwner = self._IdMapping.get(Owner, -1)
@@ -1740,7 +1740,7 @@ class DecParser(MetaFileParser):
# section content
self._ValueList = ['', '', '']
self._SectionParser[self._SectionType[0]](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
self._Comments = []
continue
diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py
index d8549c9d66e6..be3fb3d68856 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileTable.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileTable.py
@@ -140,11 +140,11 @@ class ModuleTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
- if Platform != None and Platform != 'COMMON':
+ if Platform is not None and Platform != 'COMMON':
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
- if BelongsToItem != None:
+ if BelongsToItem is not None:
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -221,7 +221,7 @@ class PackageTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -341,9 +341,9 @@ class PlatformTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,Scope3,ID,StartLine"
- if Scope1 != None and Scope1 != 'COMMON':
+ if Scope1 is not None and Scope1 != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
- if Scope2 != None and Scope2 != 'COMMON':
+ if Scope2 is not None and Scope2 != 'COMMON':
# Cover the case that CodeBase is 'COMMON' for BuildOptions section
if '.' in Scope2:
Index = Scope2.index('.')
@@ -352,12 +352,12 @@ class PlatformTable(MetaFileTable):
else:
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
- if BelongsToItem != None:
+ if BelongsToItem is not None:
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
else:
ConditionString += " AND BelongsToItem<0"
- if FromItem != None:
+ if FromItem is not None:
ConditionString += " AND FromItem=%s" % FromItem
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
index c760e57b8f64..abe34cf9a071 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
@@ -118,16 +118,16 @@ def _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, To
LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
else:
LibraryPath = Platform.LibraryClasses[LibraryClassName, ModuleType]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
LibraryPath = M.LibraryClasses[LibraryClassName]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
return []
LibraryModule = BuildDatabase[LibraryPath, Arch, Target, Toolchain]
# for those forced library instance (NULL library), add a fake library class
if LibraryClassName.startswith("NULL"):
LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
- elif LibraryModule.LibraryClass == None \
+ elif LibraryModule.LibraryClass is None \
or len(LibraryModule.LibraryClass) == 0 \
or (ModuleType != 'USER_DEFINED'
and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
@@ -139,7 +139,7 @@ def _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, To
else:
LibraryModule = LibraryInstance[LibraryClassName]
- if LibraryModule == None:
+ if LibraryModule is None:
continue
if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
@@ -239,12 +239,12 @@ def _ResolveLibraryReference(Module, Platform):
M = LibraryConsumerList.pop()
for LibraryName in M.Libraries:
Library = Platform.LibraryClasses[LibraryName, ':dummy:']
- if Library == None:
+ if Library is None:
for Key in Platform.LibraryClasses.data.keys():
if LibraryName.upper() == Key.upper():
Library = Platform.LibraryClasses[Key, ':dummy:']
break
- if Library == None:
+ if Library is None:
continue
if Library not in LibraryList:
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
index a3407d113e0f..2b888c0610c3 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -214,7 +214,7 @@ class WorkspaceDatabase(object):
else:
curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
- if rootPath == "" or rootPath == None:
+ if rootPath == "" or rootPath is None:
EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
determine whether database file is out of date!\n")
@@ -308,13 +308,13 @@ determine whether database file is out of date!\n")
Platform = self.BuildObject[PathClass(PlatformFile), 'COMMON']
except:
Platform = None
- if Platform != None:
+ if Platform is not None:
PlatformList.append(Platform)
return PlatformList
def _MapPlatform(self, Dscfile):
Platform = self.BuildObject[PathClass(Dscfile), 'COMMON']
- if Platform == None:
+ if Platform is None:
EdkLogger.error('build', PARSER_ERROR, "Failed to parser DSC file: %s" % Dscfile)
return Platform
diff --git a/BaseTools/Source/Python/build/BuildReport.py b/BaseTools/Source/Python/build/BuildReport.py
index d555dce9b3bc..966a2aa5abf1 100644
--- a/BaseTools/Source/Python/build/BuildReport.py
+++ b/BaseTools/Source/Python/build/BuildReport.py
@@ -722,7 +722,7 @@ def ReadMessage(From, To, ExitFlag):
# read one line a time
Line = From.readline()
# empty string means "end"
- if Line != None and Line != "":
+ if Line is not None and Line != "":
To(Line.rstrip())
else:
break
@@ -904,7 +904,7 @@ class PcdReport(object):
elif ReportSubType == 2:
PcdDict = self.UnusedPcds
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
FileWrite(File, gSectionStart)
if ReportSubType == 1:
FileWrite(File, "Conditional Directives used by the build system")
@@ -966,7 +966,7 @@ class PcdReport(object):
PcdValue = DecDefaultValue
if DscDefaultValue:
PcdValue = DscDefaultValue
- if ModulePcdSet != None:
+ if ModulePcdSet is not None:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type) not in ModulePcdSet:
continue
InfDefault, PcdValue = ModulePcdSet[Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type]
@@ -985,7 +985,7 @@ class PcdReport(object):
break
if First:
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
FileWrite(File, "")
FileWrite(File, Key)
First = False
@@ -993,35 +993,35 @@ class PcdReport(object):
if Pcd.DatumType in ('UINT8', 'UINT16', 'UINT32', 'UINT64'):
PcdValueNumber = int(PcdValue.strip(), 0)
- if DecDefaultValue == None:
+ if DecDefaultValue is None:
DecMatch = True
else:
DecDefaultValueNumber = int(DecDefaultValue.strip(), 0)
DecMatch = (DecDefaultValueNumber == PcdValueNumber)
- if InfDefaultValue == None:
+ if InfDefaultValue is None:
InfMatch = True
else:
InfDefaultValueNumber = int(InfDefaultValue.strip(), 0)
InfMatch = (InfDefaultValueNumber == PcdValueNumber)
- if DscDefaultValue == None:
+ if DscDefaultValue is None:
DscMatch = True
else:
DscDefaultValueNumber = int(DscDefaultValue.strip(), 0)
DscMatch = (DscDefaultValueNumber == PcdValueNumber)
else:
- if DecDefaultValue == None:
+ if DecDefaultValue is None:
DecMatch = True
else:
DecMatch = (DecDefaultValue.strip() == PcdValue.strip())
- if InfDefaultValue == None:
+ if InfDefaultValue is None:
InfMatch = True
else:
InfMatch = (InfDefaultValue.strip() == PcdValue.strip())
- if DscDefaultValue == None:
+ if DscDefaultValue is None:
DscMatch = True
else:
DscMatch = (DscDefaultValue.strip() == PcdValue.strip())
@@ -1087,7 +1087,7 @@ class PcdReport(object):
else:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*M')
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
if IsStructure:
continue
if not TypeName in ('PATCH', 'FLAG', 'FIXED'):
@@ -1111,7 +1111,7 @@ class PcdReport(object):
else:
FileWrite(File, ' *M %-*s = %s' % (self.MaxLen + 19, ModulePath, ModuleDefault.strip()))
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
FileWrite(File, gSectionEnd)
else:
if not ReportSubType and ModulePcdSet:
@@ -1127,7 +1127,7 @@ class PcdReport(object):
return HasDscOverride
def PrintPcdDefault(self, File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue):
- if not DscMatch and DscDefaultValue != None:
+ if not DscMatch and DscDefaultValue is not None:
Value = DscDefaultValue.strip()
IsByteArray, ArrayList = ByteArrayForamt(Value)
if IsByteArray:
@@ -1136,7 +1136,7 @@ class PcdReport(object):
FileWrite(File, '%s' % (Array))
else:
FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DSC DEFAULT', Value))
- if not InfMatch and InfDefaultValue != None:
+ if not InfMatch and InfDefaultValue is not None:
Value = InfDefaultValue.strip()
IsByteArray, ArrayList = ByteArrayForamt(Value)
if IsByteArray:
@@ -1146,7 +1146,7 @@ class PcdReport(object):
else:
FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'INF DEFAULT', Value))
- if not DecMatch and DecDefaultValue != None:
+ if not DecMatch and DecDefaultValue is not None:
Value = DecDefaultValue.strip()
IsByteArray, ArrayList = ByteArrayForamt(Value)
if IsByteArray:
@@ -1971,7 +1971,7 @@ class PlatformReport(object):
self.PcdReport = PcdReport(Wa)
self.FdReportList = []
- if "FLASH" in ReportType and Wa.FdfProfile and MaList == None:
+ if "FLASH" in ReportType and Wa.FdfProfile and MaList is None:
for Fd in Wa.FdfProfile.FdDict:
self.FdReportList.append(FdReport(Wa.FdfProfile.FdDict[Fd], Wa))
@@ -1984,7 +1984,7 @@ class PlatformReport(object):
self.DepexParser = DepexParser(Wa)
self.ModuleReportList = []
- if MaList != None:
+ if MaList is not None:
self._IsModuleBuild = True
for Ma in MaList:
self.ModuleReportList.append(ModuleReport(Ma, ReportType))
@@ -1994,13 +1994,13 @@ class PlatformReport(object):
ModuleAutoGenList = []
for ModuleKey in Pa.Platform.Modules:
ModuleAutoGenList.append(Pa.Platform.Modules[ModuleKey].M)
- if GlobalData.gFdfParser != None:
+ if GlobalData.gFdfParser is not None:
if Pa.Arch in GlobalData.gFdfParser.Profile.InfDict:
INFList = GlobalData.gFdfParser.Profile.InfDict[Pa.Arch]
for InfName in INFList:
InfClass = PathClass(NormPath(InfName), Wa.WorkspaceDir, Pa.Arch)
Ma = ModuleAutoGen(Wa, InfClass, Pa.BuildTarget, Pa.ToolChain, Pa.Arch, Wa.MetaFile)
- if Ma == None:
+ if Ma is None:
continue
if Ma not in ModuleAutoGenList:
ModuleAutoGenList.append(Ma)
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 85612d90ced1..f211f8c64116 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -241,7 +241,7 @@ def ReadMessage(From, To, ExitFlag):
# read one line a time
Line = From.readline()
# empty string means "end"
- if Line != None and Line != "":
+ if Line is not None and Line != "":
To(Line.rstrip())
else:
break
@@ -299,9 +299,9 @@ def LaunchCommand(Command, WorkingDir):
except: # in case of aborting
# terminate the threads redirecting the program output
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
- if EndOfProcedure != None:
+ if EndOfProcedure is not None:
EndOfProcedure.set()
- if Proc == None:
+ if Proc is None:
if type(Command) != type(""):
Command = " ".join(Command)
EdkLogger.error("build", COMMAND_FAILURE, "Failed to start command", ExtraData="%s [%s]" % (Command, WorkingDir))
@@ -375,7 +375,7 @@ class BuildUnit:
# @param Other The other BuildUnit object compared to
#
def __eq__(self, Other):
- return Other != None and self.BuildObject == Other.BuildObject \
+ return Other is not None and self.BuildObject == Other.BuildObject \
and self.BuildObject.Arch == Other.BuildObject.Arch
## hash() method
@@ -633,7 +633,7 @@ class BuildTask:
self.BuildItem = BuildItem
self.DependencyList = []
- if Dependency == None:
+ if Dependency is None:
Dependency = BuildItem.Dependency
else:
Dependency.extend(BuildItem.Dependency)
@@ -795,7 +795,7 @@ class Build():
BinCacheSource = mws.join(self.WorkspaceDir, BinCacheSource)
GlobalData.gBinCacheSource = BinCacheSource
else:
- if GlobalData.gBinCacheSource != None:
+ if GlobalData.gBinCacheSource is not None:
EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-source.")
if GlobalData.gBinCacheDest:
@@ -804,7 +804,7 @@ class Build():
BinCacheDest = mws.join(self.WorkspaceDir, BinCacheDest)
GlobalData.gBinCacheDest = BinCacheDest
else:
- if GlobalData.gBinCacheDest != None:
+ if GlobalData.gBinCacheDest is not None:
EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-destination.")
if self.ConfDirectory:
@@ -907,7 +907,7 @@ class Build():
# if no tool chain given in command line, get it from target.txt
if not self.ToolChainList:
self.ToolChainList = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
- if self.ToolChainList == None or len(self.ToolChainList) == 0:
+ if self.ToolChainList is None or len(self.ToolChainList) == 0:
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.\n")
# check if the tool chains are defined or not
@@ -935,7 +935,7 @@ class Build():
ToolChainFamily.append(ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool])
self.ToolChainFamily = ToolChainFamily
- if self.ThreadNumber == None:
+ if self.ThreadNumber is None:
self.ThreadNumber = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
if self.ThreadNumber == '':
self.ThreadNumber = 0
@@ -1224,7 +1224,7 @@ class Build():
# for dependent modules/Libraries
#
def _BuildPa(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False, FfsCommand={}):
- if AutoGenObject == None:
+ if AutoGenObject is None:
return False
# skip file generation for cleanxxx targets, run and fds target
@@ -1252,7 +1252,7 @@ class Build():
EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))
BuildCommand = AutoGenObject.BuildCommand
- if BuildCommand == None or len(BuildCommand) == 0:
+ if BuildCommand is None or len(BuildCommand) == 0:
EdkLogger.error("build", OPTION_MISSING,
"No build command found for this module. "
"Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
@@ -1343,7 +1343,7 @@ class Build():
# for dependent modules/Libraries
#
def _Build(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False):
- if AutoGenObject == None:
+ if AutoGenObject is None:
return False
# skip file generation for cleanxxx targets, run and fds target
@@ -1372,7 +1372,7 @@ class Build():
EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))
BuildCommand = AutoGenObject.BuildCommand
- if BuildCommand == None or len(BuildCommand) == 0:
+ if BuildCommand is None or len(BuildCommand) == 0:
EdkLogger.error("build", OPTION_MISSING,
"No build command found for this module. "
"Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
@@ -1536,7 +1536,7 @@ class Build():
FvMap.readline()
for Line in FvMap:
MatchGuid = GuidPattern.match(Line)
- if MatchGuid != None:
+ if MatchGuid is not None:
#
# Replace GUID with module name
#
@@ -1548,7 +1548,7 @@ class Build():
# Add the debug image full path.
#
MatchGuid = GuidName.match(Line)
- if MatchGuid != None:
+ if MatchGuid is not None:
GuidString = MatchGuid.group().split("=")[1]
if GuidString.upper() in ModuleList:
MapBuffer.write('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))
@@ -1758,7 +1758,7 @@ class Build():
for Module in Pa.Platform.Modules:
# Get ModuleAutoGen object to generate C code file and makefile
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
- if Ma == None:
+ if Ma is None:
continue
self.BuildModules.append(Ma)
self._BuildPa(self.Target, Pa, FfsCommand=CmdListDict)
@@ -1778,7 +1778,7 @@ class Build():
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
- if Ma == None:
+ if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
@@ -1856,7 +1856,7 @@ class Build():
for Module in Pa.Platform.Modules:
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
- if Ma == None: continue
+ if Ma is None: continue
MaList.append(Ma)
if Ma.CanSkipbyHash():
self.HashSkipModules.append(Ma)
@@ -1936,7 +1936,7 @@ class Build():
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
- if Ma == None:
+ if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
@@ -2021,13 +2021,13 @@ class Build():
AutoGenStart = time.time()
GlobalData.gGlobalDefines['ARCH'] = Arch
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
- if Pa == None:
+ if Pa is None:
continue
ModuleList = []
for Inf in Pa.Platform.Modules:
ModuleList.append(Inf)
# Add the INF only list in FDF
- if GlobalData.gFdfParser != None:
+ if GlobalData.gFdfParser is not None:
for InfName in GlobalData.gFdfParser.Profile.InfList:
Inf = PathClass(NormPath(InfName), self.WorkspaceDir, Arch)
if Inf in Pa.Platform.Modules:
@@ -2037,7 +2037,7 @@ class Build():
# Get ModuleAutoGen object to generate C code file and makefile
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
- if Ma == None:
+ if Ma is None:
continue
if Ma.CanSkipbyHash():
self.HashSkipModules.append(Ma)
@@ -2122,7 +2122,7 @@ class Build():
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
- if Ma == None:
+ if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
@@ -2263,18 +2263,18 @@ class Build():
FilePath = os.path.join(os.path.dirname(GlobalData.gDatabasePath), "gFileTimeStampCache")
if Utils.gFileTimeStampCache == {} and os.path.isfile(FilePath):
Utils.gFileTimeStampCache = Utils.DataRestore(FilePath)
- if Utils.gFileTimeStampCache == None:
+ if Utils.gFileTimeStampCache is None:
Utils.gFileTimeStampCache = {}
FilePath = os.path.join(os.path.dirname(GlobalData.gDatabasePath), "gDependencyDatabase")
if Utils.gDependencyDatabase == {} and os.path.isfile(FilePath):
Utils.gDependencyDatabase = Utils.DataRestore(FilePath)
- if Utils.gDependencyDatabase == None:
+ if Utils.gDependencyDatabase is None:
Utils.gDependencyDatabase = {}
def ParseDefines(DefineList=[]):
DefineDict = {}
- if DefineList != None:
+ if DefineList is not None:
for Define in DefineList:
DefineTokenList = Define.split("=", 1)
if not GlobalData.gMacroNamePattern.match(DefineTokenList[0]):
@@ -2403,16 +2403,16 @@ def Main():
GlobalData.gCaseInsensitive = Option.CaseInsensitive
# Set log level
- if Option.verbose != None:
+ if Option.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.quiet != None:
+ elif Option.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if Option.LogFile != None:
+ if Option.LogFile is not None:
EdkLogger.SetLogFile(Option.LogFile)
if Option.WarningAsError == True:
@@ -2472,13 +2472,13 @@ def Main():
if ErrorCode != 0:
EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
- if Option.PlatformFile != None:
+ if Option.PlatformFile is not None:
if os.path.isabs (Option.PlatformFile):
if os.path.normcase (os.path.normpath(Option.PlatformFile)).find (Workspace) == 0:
Option.PlatformFile = NormFile(os.path.normpath(Option.PlatformFile), Workspace)
Option.PlatformFile = PathClass(Option.PlatformFile, Workspace)
- if Option.FdfFile != None:
+ if Option.FdfFile is not None:
if os.path.isabs (Option.FdfFile):
if os.path.normcase (os.path.normpath(Option.FdfFile)).find (Workspace) == 0:
Option.FdfFile = NormFile(os.path.normpath(Option.FdfFile), Workspace)
@@ -2487,7 +2487,7 @@ def Main():
if ErrorCode != 0:
EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
- if Option.Flag != None and Option.Flag not in ['-c', '-s']:
+ if Option.Flag is not None and Option.Flag not in ['-c', '-s']:
EdkLogger.error("build", OPTION_VALUE_INVALID, "UNI flag must be one of -c or -s")
MyBuild = Build(Target, Workspace, Option)
@@ -2504,35 +2504,35 @@ def Main():
#
BuildError = False
except FatalError, X:
- if MyBuild != None:
+ if MyBuild is not None:
# for multi-thread build exits safely
MyBuild.Relinquish()
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
ReturnCode = X.args[0]
except Warning, X:
# error from Fdf parser
- if MyBuild != None:
+ if MyBuild is not None:
# for multi-thread build exits safely
MyBuild.Relinquish()
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
else:
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
except:
- if MyBuild != None:
+ if MyBuild is not None:
# for multi-thread build exits safely
MyBuild.Relinquish()
# try to get the meta-file from the object causing exception
Tb = sys.exc_info()[-1]
MetaFile = GlobalData.gProcessingFile
- while Tb != None:
+ while Tb is not None:
if 'self' in Tb.tb_frame.f_locals and hasattr(Tb.tb_frame.f_locals['self'], 'MetaFile'):
MetaFile = Tb.tb_frame.f_locals['self'].MetaFile
Tb = Tb.tb_next
@@ -2566,7 +2566,7 @@ def Main():
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
else:
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
- if MyBuild != None:
+ if MyBuild is not None:
if not BuildError:
MyBuild.BuildReport.GenerateReport(BuildDurationStr, LogBuildTime(MyBuild.AutoGenTime), LogBuildTime(MyBuild.MakeTime), LogBuildTime(MyBuild.GenFdsTime))
MyBuild.Db.Close()
--
2.16.2.windows.1
* Re: [PATCH v1 1/1] BaseTools: Remove equality operator with None
2018-03-26 20:25 ` [PATCH v1 1/1] BaseTools: Remove equality operator with None Jaben Carsey
@ 2018-03-28 2:55 ` Zhu, Yonghong
0 siblings, 0 replies; 2+ messages in thread
From: Zhu, Yonghong @ 2018-03-28 2:55 UTC (permalink / raw)
To: Carsey, Jaben, edk2-devel@lists.01.org; +Cc: Gao, Liming, Zhu, Yonghong
Thanks for this update.
Reviewed-by: Yonghong Zhu <yonghong.zhu@intel.com>
Best Regards,
Zhu Yonghong
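For reference, the rationale behind the substitution under review: "==" and "!=" dispatch to the overridable __eq__/__ne__ hooks, while "is" and "is not" compare object identity against the None singleton, which cannot be fooled and is marginally faster. A minimal sketch of the difference (the class below is hypothetical, not taken from BaseTools):

    class AlwaysEqual(object):
        # Hypothetical class whose __eq__ claims equality with everything.
        def __eq__(self, other):
            return True

    obj = AlwaysEqual()
    print(obj == None)   # True  -- misleading: __eq__ decided the result
    print(obj is None)   # False -- identity test against the None singleton

PEP 8 recommends the identity form for comparisons against singletons such as None, which is what this patch applies mechanically across BaseTools.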
-----Original Message-----
From: Carsey, Jaben
Sent: Tuesday, March 27, 2018 4:26 AM
To: edk2-devel@lists.01.org
Cc: Zhu, Yonghong <yonghong.zhu@intel.com>; Gao, Liming <liming.gao@intel.com>
Subject: [PATCH v1 1/1] BaseTools: Remove equality operator with None
replace "== None" with "is None" and "!= None" with "is not None"
Cc: Yonghong Zhu <yonghong.zhu@intel.com>
Cc: Liming Gao <liming.gao@intel.com>
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Jaben Carsey <jaben.carsey@intel.com>
---
BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py | 20 +--
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py | 6 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py | 2 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py | 10 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py | 6 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py | 74 ++++----
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py | 2 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py | 32 ++--
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py | 32 ++--
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py | 2 +-
BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py | 12 +-
BaseTools/Source/Python/AutoGen/AutoGen.py | 176 +++++++++---------
BaseTools/Source/Python/AutoGen/BuildEngine.py | 10 +-
BaseTools/Source/Python/AutoGen/GenC.py | 4 +-
BaseTools/Source/Python/AutoGen/GenDepex.py | 10 +-
BaseTools/Source/Python/AutoGen/GenMake.py | 6 +-
BaseTools/Source/Python/AutoGen/GenPcdDb.py | 2 +-
BaseTools/Source/Python/AutoGen/IdfClassObject.py | 4 +-
BaseTools/Source/Python/AutoGen/StrGather.py | 6 +-
BaseTools/Source/Python/AutoGen/UniClassObject.py | 22 +--
BaseTools/Source/Python/BPDG/BPDG.py | 10 +-
BaseTools/Source/Python/BPDG/GenVpd.py | 4 +-
BaseTools/Source/Python/Common/DecClassObject.py | 2 +-
BaseTools/Source/Python/Common/Dictionary.py | 2 +-
BaseTools/Source/Python/Common/DscClassObject.py | 4 +-
BaseTools/Source/Python/Common/EdkIIWorkspace.py | 2 +-
BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py | 30 ++--
BaseTools/Source/Python/Common/EdkLogger.py | 18 +-
BaseTools/Source/Python/Common/FdfClassObject.py | 2 +-
BaseTools/Source/Python/Common/FdfParserLite.py | 40 ++---
BaseTools/Source/Python/Common/InfClassObject.py | 2 +-
BaseTools/Source/Python/Common/Misc.py | 80 ++++-----
BaseTools/Source/Python/Common/Parsing.py | 4 +-
BaseTools/Source/Python/Common/String.py | 4 +-
BaseTools/Source/Python/Common/TargetTxtClassObject.py | 6 +-
BaseTools/Source/Python/Common/ToolDefClassObject.py | 2 +-
BaseTools/Source/Python/Common/VpdInfoFile.py | 12 +-
BaseTools/Source/Python/CommonDataClass/CommonClass.py | 10 +-
BaseTools/Source/Python/Ecc/CParser.py | 12 +-
BaseTools/Source/Python/Ecc/Check.py | 2 +-
BaseTools/Source/Python/Ecc/CodeFragmentCollector.py | 4 +-
BaseTools/Source/Python/Ecc/Ecc.py | 30 ++--
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py | 4 +-
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py | 12 +-
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py | 14 +-
BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py | 10 +-
BaseTools/Source/Python/Ecc/c.py | 62 +++----
BaseTools/Source/Python/Eot/CParser.py | 12 +-
BaseTools/Source/Python/Eot/CodeFragmentCollector.py | 2 +-
BaseTools/Source/Python/Eot/Eot.py | 6 +-
BaseTools/Source/Python/Eot/FvImage.py | 42 ++---
BaseTools/Source/Python/Eot/InfParserLite.py | 2 +-
BaseTools/Source/Python/Eot/Report.py | 2 +-
BaseTools/Source/Python/GenFds/AprioriSection.py | 4 +-
BaseTools/Source/Python/GenFds/Capsule.py | 2 +-
BaseTools/Source/Python/GenFds/CompressSection.py | 8 +-
BaseTools/Source/Python/GenFds/DataSection.py | 4 +-
BaseTools/Source/Python/GenFds/DepexSection.py | 2 +-
BaseTools/Source/Python/GenFds/EfiSection.py | 26 +--
BaseTools/Source/Python/GenFds/FdfParser.py | 50 +++---
BaseTools/Source/Python/GenFds/FfsFileStatement.py | 12 +-
BaseTools/Source/Python/GenFds/FfsInfStatement.py | 48 ++---
BaseTools/Source/Python/GenFds/Fv.py | 32 ++--
BaseTools/Source/Python/GenFds/FvImageSection.py | 12 +-
BaseTools/Source/Python/GenFds/GenFds.py | 46 ++---
BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py | 28 +--
BaseTools/Source/Python/GenFds/GuidSection.py | 30 ++--
BaseTools/Source/Python/GenFds/OptRomFileStatement.py | 2 +-
BaseTools/Source/Python/GenFds/OptRomInfStatement.py | 16 +-
BaseTools/Source/Python/GenFds/OptionRom.py | 4 +-
BaseTools/Source/Python/GenFds/Region.py | 10 +-
BaseTools/Source/Python/GenFds/Section.py | 8 +-
BaseTools/Source/Python/GenFds/UiSection.py | 6 +-
BaseTools/Source/Python/GenFds/VerSection.py | 6 +-
BaseTools/Source/Python/GenFds/Vtf.py | 4 +-
BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py | 24 +--
BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py | 4 +-
BaseTools/Source/Python/TargetTool/TargetTool.py | 28 +--
BaseTools/Source/Python/Trim/Trim.py | 26 +--
BaseTools/Source/Python/UPT/Core/DependencyRules.py | 4 +-
BaseTools/Source/Python/UPT/Core/IpiDb.py | 30 ++--
BaseTools/Source/Python/UPT/Core/PackageFile.py | 2 +-
BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py | 6 +-
BaseTools/Source/Python/UPT/InstallPkg.py | 2 +-
| 12 +-
BaseTools/Source/Python/UPT/Library/Misc.py | 10 +-
BaseTools/Source/Python/UPT/Library/ParserValidate.py | 30 ++--
BaseTools/Source/Python/UPT/Library/Parsing.py | 10 +-
BaseTools/Source/Python/UPT/Library/String.py | 8 +-
BaseTools/Source/Python/UPT/Library/UniClassObject.py | 22 +--
BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py | 10 +-
BaseTools/Source/Python/UPT/Logger/Log.py | 18 +-
BaseTools/Source/Python/UPT/MkPkg.py | 2 +-
BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py | 12 +-
BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py | 46 ++---
BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py | 6 +-
| 10 +-
BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py | 4 +-
BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py | 10 +-
BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py | 6 +-
BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py | 6 +-
BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py | 6 +-
BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfParser.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfParserMisc.py | 4 +-
BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py | 2 +-
BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py | 4 +-
BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py | 18 +-
BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py | 18 +-
BaseTools/Source/Python/UPT/UPT.py | 2 +-
BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py | 2 +-
BaseTools/Source/Python/UPT/Xml/XmlParser.py | 38 ++--
BaseTools/Source/Python/Workspace/BuildClassObject.py | 2 +-
BaseTools/Source/Python/Workspace/DecBuildData.py | 36 ++--
BaseTools/Source/Python/Workspace/DscBuildData.py | 158 ++++++++--------
BaseTools/Source/Python/Workspace/InfBuildData.py | 190 ++++++++++----------
BaseTools/Source/Python/Workspace/MetaDataTable.py | 4 +-
BaseTools/Source/Python/Workspace/MetaFileParser.py | 10 +-
BaseTools/Source/Python/Workspace/MetaFileTable.py | 16 +-
BaseTools/Source/Python/Workspace/WorkspaceCommon.py | 12 +-
BaseTools/Source/Python/Workspace/WorkspaceDatabase.py | 6 +-
BaseTools/Source/Python/build/BuildReport.py | 38 ++--
BaseTools/Source/Python/build/build.py | 82 ++++-----
131 files changed, 1142 insertions(+), 1142 deletions(-)
diff --git a/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py b/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
index 92ee69978277..557ffa4505e4 100644
--- a/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
+++ b/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
@@ -49,18 +49,18 @@ def parseCmdArgs():
# validate the options
errors = []
- if options.WorkspacePath == None:
+ if options.WorkspacePath is None:
errors.append('- Please specify workspace path via option -w!')
elif not os.path.exists(options.WorkspacePath):
errors.append("- Invalid workspace path %s! The workspace path should be exist in absolute path!" % options.WorkspacePath)
- if options.PackagePath == None:
+ if options.PackagePath is None:
errors.append('- Please specify package DEC file path via option -p!')
elif not os.path.exists(options.PackagePath):
errors.append("- Invalid package's DEC file path %s! The DEC path should be exist in absolute path!" % options.PackagePath)
default = "C:\\Program Files\\doxygen\\bin\\doxygen.exe"
- if options.DoxygenPath == None:
+ if options.DoxygenPath is None:
if os.path.exists(default):
print "Warning: Assume doxygen tool is installed at %s. If not, please specify via -x" % default
options.DoxygenPath = default
@@ -69,7 +69,7 @@ def parseCmdArgs():
elif not os.path.exists(options.DoxygenPath):
errors.append("- Invalid doxygen tool path %s! The doxygen tool path should be exist in absolute path!" % options.DoxygenPath)
- if options.OutputPath != None:
+ if options.OutputPath is not None:
if not os.path.exists(options.OutputPath):
# create output
try:
@@ -77,7 +77,7 @@ def parseCmdArgs():
except:
errors.append('- Fail to create the output directory %s' % options.OutputPath)
else:
- if options.PackagePath != None and os.path.exists(options.PackagePath):
+ if options.PackagePath is not None and os.path.exists(options.PackagePath):
dirpath = os.path.dirname(options.PackagePath)
default = os.path.join (dirpath, "Document")
print 'Warning: Assume document output at %s. If not, please specify via option -o' % default
@@ -90,21 +90,21 @@ def parseCmdArgs():
else:
errors.append('- Please specify document output path via option -o!')
- if options.Arch == None:
+ if options.Arch is None:
options.Arch = 'ALL'
print "Warning: Assume arch is \"ALL\". If not, specify via -a"
- if options.DocumentMode == None:
+ if options.DocumentMode is None:
options.DocumentMode = "HTML"
print "Warning: Assume document mode is \"HTML\". If not, specify via -m"
- if options.IncludeOnly == None:
+ if options.IncludeOnly is None:
options.IncludeOnly = False
print "Warning: Assume generate package document for all package\'s source including publich interfaces and implementation libraries and modules."
if options.DocumentMode.lower() == 'chm':
default = "C:\\Program Files\\HTML Help Workshop\\hhc.exe"
- if options.HtmlWorkshopPath == None:
+ if options.HtmlWorkshopPath is None:
if os.path.exists(default):
print 'Warning: Assume the installation path of Microsoft HTML Workshop is %s. If not, specify via option -c.' % default
options.HtmlWorkshopPath = default
@@ -382,7 +382,7 @@ if __name__ == '__main__':
# create package model object firstly
pkgObj = createPackageObject(wspath, pkgpath)
- if pkgObj == None:
+ if pkgObj is None:
sys.exit(-1)
# create doxygen action model
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
index 2d0cc9d96e1f..488949f24b6f 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
@@ -58,7 +58,7 @@ class Page(BaseDoxygeItem):
return subpage
def AddPages(self, pageArray):
- if pageArray == None:
+ if pageArray is None:
return
for page in pageArray:
self.AddPage(page)
@@ -370,7 +370,7 @@ class DoxygenConfigFile:
self.mWarningFile = str.replace('\\', '/')
def FileExists(self, path):
- if path == None:
+ if path is None:
return False
if len(path) == 0:
return False
@@ -382,7 +382,7 @@ class DoxygenConfigFile:
return False
def AddFile(self, path):
- if path == None:
+ if path is None:
return
if len(path) == 0:
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
index 72becedb8e4e..9db16a63c07a 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
@@ -553,7 +553,7 @@ class EfiFvMapFile(object):
if line[0] != ' ':
# new entry
ret = rMapEntry.match(line)
- if ret != None:
+ if ret is not None:
name = ret.groups()[0]
baseaddr = int(ret.groups()[1], 16)
entry = int(ret.groups()[2], 16)
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
index 515e7a4fa7dd..bf1040d6bac4 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
@@ -34,7 +34,7 @@ class BaseINIFile(object):
if key not in cls._objs.keys():
cls._objs[key] = object.__new__(cls, *args, **kwargs)
- if parent != None:
+ if parent is not None:
cls._objs[key].AddParent(parent)
return cls._objs[key]
@@ -47,7 +47,7 @@ class BaseINIFile(object):
self._isModify = True
def AddParent(self, parent):
- if parent == None: return
+ if parent is None: return
if not hasattr(self, "_parents"):
self._parents = []
@@ -122,7 +122,7 @@ class BaseINIFile(object):
continue
m = section_re.match(templine)
- if m!= None: # found a section
+ if m is not None: # found a section
inGlobal = False
# Finish the latest section first
if len(sObjs) != 0:
@@ -165,7 +165,7 @@ class BaseINIFile(object):
def Destroy(self, parent):
# check referenced parent
- if parent != None:
+ if parent is not None:
assert parent in self._parents, "when destory ini object, can not found parent reference!"
self._parents.remove(parent)
@@ -307,7 +307,7 @@ class BaseINISection(object):
visit += 1
continue
line = line.split('#')[0].strip()
- if iniObj != None:
+ if iniObj is not None:
if line.endswith('}'):
iniObj._end = visit - self._start
if not iniObj.Parse():
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
index 5cf202857376..51de5cb74e3c 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
@@ -35,14 +35,14 @@ def WarnMsg(mess, fName=None, fNo=None):
def NormalMessage(type, mess, fName=None, fNo=None):
strMsg = type
- if fName != None:
+ if fName is not None:
strMsg += ' %s' % fName.replace('/', '\\')
- if fNo != None:
+ if fNo is not None:
strMsg += '(%d):' % fNo
else:
strMsg += ' :'
- if fName == None and fNo == None:
+ if fName is None and fNo is None:
strMsg += ' '
strMsg += mess
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
index 05fa2529be2d..7c120d85c255 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
@@ -74,7 +74,7 @@ class SurfaceObject(object):
def Load(self, relativePath):
# if has been loaded, directly return
- if self._fileObj != None: return True
+ if self._fileObj is not None: return True
relativePath = os.path.normpath(relativePath)
fullPath = os.path.join(self._workspace, relativePath)
@@ -160,7 +160,7 @@ class Platform(SurfaceObject):
return dsc.DSCFile
def GetModuleCount(self):
- if self.GetFileObj() == None:
+ if self.GetFileObj() is None:
ErrorMsg("Fail to get module count because DSC file has not been load!")
return len(self.GetFileObj().GetComponents())
@@ -171,7 +171,7 @@ class Platform(SurfaceObject):
def LoadModules(self, precallback=None, postcallback=None):
for obj in self.GetFileObj().GetComponents():
mFilename = obj.GetFilename()
- if precallback != None:
+ if precallback is not None:
precallback(self, mFilename)
arch = obj.GetArch()
if arch.lower() == 'common':
@@ -182,7 +182,7 @@ class Platform(SurfaceObject):
module = Module(self, self.GetWorkspace())
if module.Load(mFilename, arch, obj.GetOveridePcds(), obj.GetOverideLibs()):
self._modules.append(module)
- if postcallback != None:
+ if postcallback is not None:
postcallback(self, module)
else:
del module
@@ -222,7 +222,7 @@ class Platform(SurfaceObject):
for obj in objs:
if obj.GetPcdName().lower() == name.lower():
arr.append(obj)
- if arch != None:
+ if arch is not None:
arr = self.FilterObjsByArch(arr, arch)
return arr
@@ -292,7 +292,7 @@ class Platform(SurfaceObject):
newSect = newDsc.AddNewSection(oldSect.GetName())
for oldComObj in oldSect.GetObjects():
module = self.GetModuleObject(oldComObj.GetFilename(), oldSect.GetArch())
- if module == None: continue
+ if module is None: continue
newComObj = dsc.DSCComponentObject(newSect)
newComObj.SetFilename(oldComObj.GetFilename())
@@ -300,7 +300,7 @@ class Platform(SurfaceObject):
# add all library instance for override section
libdict = module.GetLibraries()
for libclass in libdict.keys():
- if libdict[libclass] != None:
+ if libdict[libclass] is not None:
newComObj.AddOverideLib(libclass, libdict[libclass].GetRelativeFilename().replace('\\', '/'))
# add all pcds for override section
@@ -338,7 +338,7 @@ class Module(SurfaceObject):
def Destroy(self):
for lib in self._libs.values():
- if lib != None:
+ if lib is not None:
lib.Destroy()
self._libs.clear()
@@ -351,12 +351,12 @@ class Module(SurfaceObject):
del self._ppis[:]
for protocol in self._protocols:
- if protocol != None:
+ if protocol is not None:
protocol.DeRef(self)
del self._protocols[:]
for guid in self._guids:
- if guid != None:
+ if guid is not None:
guid.DeRef(self)
del self._guids[:]
@@ -375,9 +375,9 @@ class Module(SurfaceObject):
return False
self._arch = arch
- if overidePcds != None:
+ if overidePcds is not None:
self._overideLibs = overideLibs
- if overideLibs != None:
+ if overideLibs is not None:
self._overidePcds = overidePcds
self._SearchLibraries()
@@ -403,7 +403,7 @@ class Module(SurfaceObject):
def GetPcds(self):
pcds = self._pcds.copy()
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
for name in lib._pcds.keys():
pcds[name] = lib._pcds[name]
return pcds
@@ -412,7 +412,7 @@ class Module(SurfaceObject):
ppis = []
ppis += self._ppis
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
ppis += lib._ppis
return ppis
@@ -420,7 +420,7 @@ class Module(SurfaceObject):
pros = []
pros = self._protocols
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
pros += lib._protocols
return pros
@@ -428,7 +428,7 @@ class Module(SurfaceObject):
guids = []
guids += self._guids
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
guids += lib._guids
return guids
@@ -436,12 +436,12 @@ class Module(SurfaceObject):
deps = []
deps += self._depexs
for lib in self._libs.values():
- if lib == None: continue
+ if lib is None: continue
deps += lib._depexs
return deps
def IsLibrary(self):
- return self.GetFileObj().GetDefine("LIBRARY_CLASS") != None
+ return self.GetFileObj().GetDefine("LIBRARY_CLASS") is not None
def GetLibraryInstance(self, classname, arch, type):
if classname not in self._libs.keys():
@@ -454,7 +454,7 @@ class Module(SurfaceObject):
parent = self.GetParent()
if issubclass(parent.__class__, Platform):
path = parent.GetLibraryPath(classname, arch, type)
- if path == None:
+ if path is None:
ErrorMsg('Fail to get library instance for %s' % classname, self.GetFilename())
return None
self._libs[classname] = Library(self, self.GetWorkspace())
@@ -477,7 +477,7 @@ class Module(SurfaceObject):
continue
classname = obj.GetClass()
instance = self.GetLibraryInstance(classname, arch, type)
- if not self.IsLibrary() and instance != None:
+ if not self.IsLibrary() and instance is not None:
instance._isInherit = False
if classname not in self._libs.keys():
@@ -490,7 +490,7 @@ class Module(SurfaceObject):
pros = []
deps = []
guids = []
- if self.GetFileObj() != None:
+ if self.GetFileObj() is not None:
pcds = self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('pcd'),
self.GetArch())
for pcd in pcds:
@@ -534,31 +534,31 @@ class Module(SurfaceObject):
objs = self.GetFileObj().GetSectionObjectsByName('packages')
for obj in objs:
package = self.GetPlatform().GetPackage(obj.GetPath())
- if package != None:
+ if package is not None:
self._packages.append(package)
def GetPackages(self):
return self._packages
def GetPcdObjects(self):
- if self.GetFileObj() == None:
+ if self.GetFileObj() is None:
return []
return self.GetFileObj().GetSectionObjectsByName('pcd')
def GetLibraryClassHeaderFilePath(self):
lcname = self.GetFileObj().GetProduceLibraryClass()
- if lcname == None: return None
+ if lcname is None: return None
pkgs = self.GetPackages()
for package in pkgs:
path = package.GetLibraryClassHeaderPathByName(lcname)
- if path != None:
+ if path is not None:
return os.path.realpath(os.path.join(package.GetFileObj().GetPackageRootPath(), path))
return None
def Reload(self, force=False, callback=None):
- if callback != None:
+ if callback is not None:
callback(self, "Starting reload...")
ret = SurfaceObject.Reload(self, force)
@@ -568,7 +568,7 @@ class Module(SurfaceObject):
return True
for lib in self._libs.values():
- if lib != None:
+ if lib is not None:
lib.Destroy()
self._libs.clear()
@@ -591,13 +591,13 @@ class Module(SurfaceObject):
del self._packages[:]
del self._depexs[:]
- if callback != None:
+ if callback is not None:
callback(self, "Searching libraries...")
self._SearchLibraries()
- if callback != None:
+ if callback is not None:
callback(self, "Searching packages...")
self._SearchPackage()
- if callback != None:
+ if callback is not None:
callback(self, "Searching surface items...")
self._SearchSurfaceItems()
@@ -665,16 +665,16 @@ class Package(SurfaceObject):
def Destroy(self):
for pcd in self._pcds.values():
- if pcd != None:
+ if pcd is not None:
pcd.Destroy()
for guid in self._guids.values():
- if guid != None:
+ if guid is not None:
guid.Destroy()
for protocol in self._protocols.values():
- if protocol != None:
+ if protocol is not None:
protocol.Destroy()
for ppi in self._ppis.values():
- if ppi != None:
+ if ppi is not None:
ppi.Destroy()
self._pcds.clear()
self._guids.clear()
@@ -689,7 +689,7 @@ class Package(SurfaceObject):
pcds = self.GetFileObj().GetSectionObjectsByName('pcds')
for pcd in pcds:
if pcd.GetPcdName() in self._pcds.keys():
- if self._pcds[pcd.GetPcdName()] != None:
+ if self._pcds[pcd.GetPcdName()] is not None:
self._pcds[pcd.GetPcdName()].AddDecObj(pcd)
else:
self._pcds[pcd.GetPcdName()] = PcdItem(pcd.GetPcdName(), self, pcd)
@@ -726,7 +726,7 @@ class Package(SurfaceObject):
def GetPcdDefineObjs(self, name=None):
arr = []
objs = self.GetFileObj().GetSectionObjectsByName('pcds')
- if name == None: return objs
+ if name is None: return objs
for obj in objs:
if obj.GetPcdName().lower() == name.lower():
@@ -772,7 +772,7 @@ class ModulePcd(object):
def __init__(self, parent, name, infObj, pcdItem):
assert issubclass(parent.__class__, Module), "Module's PCD's parent must be module!"
- assert pcdItem != None, 'Pcd %s does not in some package!' % name
+ assert pcdItem is not None, 'Pcd %s does not in some package!' % name
self._name = name
self._parent = parent
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
index 3bd0b7b58795..9ff0df385154 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
@@ -77,7 +77,7 @@ class DECSection(ini.BaseINISection):
return arr[1]
def IsArchMatch(self, arch):
- if arch == None or self.GetArch() == 'common':
+ if arch is None or self.GetArch() == 'common':
return True
if self.GetArch().lower() != arch.lower():
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
index 268ba5c3bdd0..94b6588c0ddf 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
@@ -69,7 +69,7 @@ class DoxygenAction:
self._chmCallback = None
def Log(self, message, level='info'):
- if self._log != None:
+ if self._log is not None:
self._log(message, level)
def IsVerbose(self):
@@ -94,7 +94,7 @@ class DoxygenAction:
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
indexPagePath = self.GenerateIndexPage()
- if indexPagePath == None:
+ if indexPagePath is None:
self.Log("Fail to generate index page!\n", 'error')
return False
else:
@@ -109,7 +109,7 @@ class DoxygenAction:
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
# launch doxygen tool to generate document
- if self._doxygenCallback != None:
+ if self._doxygenCallback is not None:
self.Log(" >>>>>> Start doxygen process...Zzz...\n")
if not self._doxygenCallback(self._doxPath, configFilePath):
return False
@@ -166,9 +166,9 @@ class PackageDocumentAction(DoxygenAction):
self._configFile.AddPreDefined('MDE_CPU_ARM')
namestr = self._pObj.GetName()
- if self._arch != None:
+ if self._arch is not None:
namestr += '[%s]' % self._arch
- if self._tooltag != None:
+ if self._tooltag is not None:
namestr += '[%s]' % self._tooltag
self._configFile.SetProjectName(namestr)
self._configFile.SetStripPath(self._pObj.GetWorkspace())
@@ -314,7 +314,7 @@ class PackageDocumentAction(DoxygenAction):
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
classPage = doxygen.Page(obj.GetClassName(),
"lc_%s" % obj.GetClassName())
@@ -399,7 +399,7 @@ class PackageDocumentAction(DoxygenAction):
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
filePath = mo.groups()[0]
- if filePath == None or len(filePath) == 0:
+ if filePath is None or len(filePath) == 0:
continue
# find header file in module's path firstly.
@@ -417,7 +417,7 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(incPath):
fullPath = incPath
break
- if infObj != None:
+ if infObj is not None:
pkgInfObjs = infObj.GetSectionObjectsByName('packages')
for obj in pkgInfObjs:
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
@@ -433,10 +433,10 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(os.path.join(incPath, filePath)):
fullPath = os.path.join(os.path.join(incPath, filePath))
break
- if fullPath != None:
+ if fullPath is not None:
break
- if fullPath == None and self.IsVerbose():
+ if fullPath is None and self.IsVerbose():
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
return
else:
@@ -477,7 +477,7 @@ class PackageDocumentAction(DoxygenAction):
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
typeRoot = typeRootPageDict[obj.GetPcdType()]
- if self._arch != None:
+ if self._arch is not None:
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
@@ -573,7 +573,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('GUID', 'guid_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
else:
@@ -626,7 +626,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PPI', 'ppi_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
else:
@@ -680,7 +680,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
else:
@@ -773,7 +773,7 @@ class PackageDocumentAction(DoxygenAction):
if not infObj.Parse():
self.Log('Fail to load INF file %s' % inf)
continue
- if infObj.GetProduceLibraryClass() != None:
+ if infObj.GetProduceLibraryClass() is not None:
libObjs.append(infObj)
else:
modObjs.append(infObj)
@@ -951,7 +951,7 @@ class PackageDocumentAction(DoxygenAction):
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
workspace,
refDecObjs)
- if retarr != None:
+ if retarr is not None:
pkgname, hPath = retarr
else:
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
index 876da1327b26..ca55929eda9a 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
@@ -66,7 +66,7 @@ class DoxygenAction:
self._chmCallback = None
def Log(self, message, level='info'):
- if self._log != None:
+ if self._log is not None:
self._log(message, level)
def IsVerbose(self):
@@ -91,7 +91,7 @@ class DoxygenAction:
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
indexPagePath = self.GenerateIndexPage()
- if indexPagePath == None:
+ if indexPagePath is None:
self.Log("Fail to generate index page!\n", 'error')
return False
else:
@@ -106,7 +106,7 @@ class DoxygenAction:
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
# launch doxygen tool to generate document
- if self._doxygenCallback != None:
+ if self._doxygenCallback is not None:
self.Log(" >>>>>> Start doxygen process...Zzz...\n")
if not self._doxygenCallback(self._doxPath, configFilePath):
return False
@@ -167,9 +167,9 @@ class PackageDocumentAction(DoxygenAction):
self._configFile.AddPreDefined(macro)
namestr = self._pObj.GetName()
- if self._arch != None:
+ if self._arch is not None:
namestr += '[%s]' % self._arch
- if self._tooltag != None:
+ if self._tooltag is not None:
namestr += '[%s]' % self._tooltag
self._configFile.SetProjectName(namestr)
self._configFile.SetStripPath(self._pObj.GetWorkspace())
@@ -315,7 +315,7 @@ class PackageDocumentAction(DoxygenAction):
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
classPage = doxygen.Page(obj.GetClassName(),
"lc_%s" % obj.GetClassName())
@@ -401,7 +401,7 @@ class PackageDocumentAction(DoxygenAction):
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
filePath = mo.groups()[0]
- if filePath == None or len(filePath) == 0:
+ if filePath is None or len(filePath) == 0:
continue
# find header file in module's path firstly.
@@ -419,7 +419,7 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(incPath):
fullPath = incPath
break
- if infObj != None:
+ if infObj is not None:
pkgInfObjs = infObj.GetSectionObjectsByName('packages')
for obj in pkgInfObjs:
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
@@ -435,10 +435,10 @@ class PackageDocumentAction(DoxygenAction):
if os.path.exists(os.path.join(incPath, filePath)):
fullPath = os.path.join(os.path.join(incPath, filePath))
break
- if fullPath != None:
+ if fullPath is not None:
break
- if fullPath == None and self.IsVerbose():
+ if fullPath is None and self.IsVerbose():
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
return
else:
@@ -479,7 +479,7 @@ class PackageDocumentAction(DoxygenAction):
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
typeRoot = typeRootPageDict[obj.GetPcdType()]
- if self._arch != None:
+ if self._arch is not None:
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
@@ -575,7 +575,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('GUID', 'guid_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
else:
@@ -628,7 +628,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PPI', 'ppi_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
else:
@@ -682,7 +682,7 @@ class PackageDocumentAction(DoxygenAction):
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
if len(objs) == 0: return []
- if self._arch != None:
+ if self._arch is not None:
for obj in objs:
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
else:
@@ -775,7 +775,7 @@ class PackageDocumentAction(DoxygenAction):
if not infObj.Parse():
self.Log('Fail to load INF file %s' % inf)
continue
- if infObj.GetProduceLibraryClass() != None:
+ if infObj.GetProduceLibraryClass() is not None:
libObjs.append(infObj)
else:
modObjs.append(infObj)
@@ -954,7 +954,7 @@ class PackageDocumentAction(DoxygenAction):
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
workspace,
refDecObjs)
- if retarr != None:
+ if retarr is not None:
pkgname, hPath = retarr
else:
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
index f8ed5315618c..0628fa740826 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
@@ -189,7 +189,7 @@ class DSCComponentObject(DSCSectionObject):
lines.append(' <%s>\n' % key)
for name, value in self._OveridePcds[key]:
- if value != None:
+ if value is not None:
lines.append(' %s|%s\n' % (name, value))
else:
lines.append(' %s\n' % name)
diff --git a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
index 9d70fbcf97db..32b26850e766 100644
--- a/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
+++ b/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
@@ -23,7 +23,7 @@ class INFFile(ini.BaseINIFile):
def GetProduceLibraryClass(self):
obj = self.GetDefine("LIBRARY_CLASS")
- if obj == None: return None
+ if obj is None: return None
return obj.split('|')[0].strip()
@@ -59,7 +59,7 @@ class INFFile(ini.BaseINIFile):
if not ini.BaseINIFile.Parse(self):
return False
classname = self.GetProduceLibraryClass()
- if classname != None:
+ if classname is not None:
libobjdict = INFFile._libobjs
if libobjdict.has_key(classname):
if self not in libobjdict[classname]:
@@ -77,7 +77,7 @@ class INFFile(ini.BaseINIFile):
def Clear(self):
classname = self.GetProduceLibraryClass()
- if classname != None:
+ if classname is not None:
libobjdict = INFFile._libobjs
libobjdict[classname].remove(self)
if len(libobjdict[classname]) == 0:
@@ -114,7 +114,7 @@ class INFSection(ini.BaseINISection):
return arr[1]
def IsArchMatch(self, arch):
- if arch == None or self.GetArch() == 'common':
+ if arch is None or self.GetArch() == 'common':
return True
if self.GetArch().lower() != arch.lower():
@@ -258,9 +258,9 @@ class INFSourceObject(INFSectionObject):
del objdict[self.mFilename]
def IsMatchFamily(self, family):
- if family == None:
+ if family is None:
return True
- if self.mFamily != None:
+ if self.mFamily is not None:
if family.strip().lower() == self.mFamily.lower():
return True
else:
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 95e3e912b168..e54c8e66f3ad 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -765,7 +765,7 @@ class WorkspaceAutoGen(AutoGen):
for Fv in Fdf.Profile.FvDict:
_GuidDict = {}
for FfsFile in Fdf.Profile.FvDict[Fv].FfsList:
- if FfsFile.InfFileName and FfsFile.NameGuid == None:
+ if FfsFile.InfFileName and FfsFile.NameGuid is None:
#
# Get INF file GUID
#
@@ -816,7 +816,7 @@ class WorkspaceAutoGen(AutoGen):
ExtraData=self.FdfFile)
InfFoundFlag = False
- if FfsFile.NameGuid != None:
+ if FfsFile.NameGuid is not None:
_CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")
#
@@ -938,13 +938,13 @@ class WorkspaceAutoGen(AutoGen):
## Return the directory to store FV files
def _GetFvDir(self):
- if self._FvDir == None:
+ if self._FvDir is None:
self._FvDir = path.join(self.BuildDir, 'FV')
return self._FvDir
## Return the directory to store all intermediate and final files built
def _GetBuildDir(self):
- if self._BuildDir == None:
+ if self._BuildDir is None:
return self.AutoGenObjectList[0].BuildDir
## Return the build output directory platform specifies
@@ -972,7 +972,7 @@ class WorkspaceAutoGen(AutoGen):
# @retval string Makefile directory
#
def _GetMakeFileDir(self):
- if self._MakeFileDir == None:
+ if self._MakeFileDir is None:
self._MakeFileDir = self.BuildDir
return self._MakeFileDir
@@ -981,7 +981,7 @@ class WorkspaceAutoGen(AutoGen):
# @retval string Build command string
#
def _GetBuildCommand(self):
- if self._BuildCommand == None:
+ if self._BuildCommand is None:
# BuildCommand should be all the same. So just get one from platform AutoGen
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
return self._BuildCommand
@@ -1209,7 +1209,7 @@ class PlatformAutoGen(AutoGen):
self.VariableInfo = None
- if GlobalData.gFdfParser != None:
+ if GlobalData.gFdfParser is not None:
self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
for Inf in self._AsBuildInfList:
InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
@@ -1325,7 +1325,7 @@ class PlatformAutoGen(AutoGen):
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
- if SkuId == None or SkuId == '':
+ if SkuId is None or SkuId == '':
continue
if len(Sku.VariableName) > 0:
VariableGuidStructure = Sku.VariableGuidValue
@@ -1636,7 +1636,7 @@ class PlatformAutoGen(AutoGen):
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
NeedProcessVpdMapFile = True
- if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
+ if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
EdkLogger.error("Build", FILE_NOT_FOUND, \
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
@@ -1648,7 +1648,7 @@ class PlatformAutoGen(AutoGen):
for DscPcd in PlatformPcds:
DscPcdEntry = self._PlatformPcds[DscPcd]
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
FoundFlag = False
for VpdPcd in VpdFile._VpdArray.keys():
# This PCD has been referenced by module
@@ -1728,7 +1728,7 @@ class PlatformAutoGen(AutoGen):
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
VpdSkuMap[DscPcd] = SkuValueMap
- if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
+ if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
VpdFile.GetCount() != 0:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
@@ -1811,14 +1811,14 @@ class PlatformAutoGen(AutoGen):
BPDGToolName = ToolDef["PATH"]
break
# Call third party GUID BPDG tool.
- if BPDGToolName != None:
+ if BPDGToolName is not None:
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
## Return the platform build data object
def _GetPlatform(self):
- if self._Platform == None:
+ if self._Platform is None:
self._Platform = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
return self._Platform
@@ -1836,7 +1836,7 @@ class PlatformAutoGen(AutoGen):
## Return the FDF file name
def _GetFdfFile(self):
- if self._FdfFile == None:
+ if self._FdfFile is None:
if self.Workspace.FdfFile != "":
self._FdfFile= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
else:
@@ -1849,7 +1849,7 @@ class PlatformAutoGen(AutoGen):
## Return the directory to store all intermediate and final files built
def _GetBuildDir(self):
- if self._BuildDir == None:
+ if self._BuildDir is None:
if os.path.isabs(self.OutputDir):
self._BuildDir = path.join(
path.abspath(self.OutputDir),
@@ -1869,7 +1869,7 @@ class PlatformAutoGen(AutoGen):
# @retval string Makefile directory
#
def _GetMakeFileDir(self):
- if self._MakeFileDir == None:
+ if self._MakeFileDir is None:
self._MakeFileDir = path.join(self.BuildDir, self.Arch)
return self._MakeFileDir
@@ -1878,7 +1878,7 @@ class PlatformAutoGen(AutoGen):
# @retval string Build command string
#
def _GetBuildCommand(self):
- if self._BuildCommand == None:
+ if self._BuildCommand is None:
self._BuildCommand = []
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])
@@ -1900,7 +1900,7 @@ class PlatformAutoGen(AutoGen):
# Get each tool defition for given tool chain from tools_def.txt and platform
#
def _GetToolDefinition(self):
- if self._ToolDefinitions == None:
+ if self._ToolDefinitions is None:
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
@@ -1966,13 +1966,13 @@ class PlatformAutoGen(AutoGen):
## Return the paths of tools
def _GetToolDefFile(self):
- if self._ToolDefFile == None:
+ if self._ToolDefFile is None:
self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
return self._ToolDefFile
## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
def _GetToolChainFamily(self):
- if self._ToolChainFamily == None:
+ if self._ToolChainFamily is None:
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
@@ -1985,7 +1985,7 @@ class PlatformAutoGen(AutoGen):
return self._ToolChainFamily
def _GetBuildRuleFamily(self):
- if self._BuildRuleFamily == None:
+ if self._BuildRuleFamily is None:
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
@@ -1999,19 +1999,19 @@ class PlatformAutoGen(AutoGen):
## Return the build options specific for all modules in this platform
def _GetBuildOptions(self):
- if self._BuildOption == None:
+ if self._BuildOption is None:
self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)
return self._BuildOption
## Return the build options specific for EDK modules in this platform
def _GetEdkBuildOptions(self):
- if self._EdkBuildOption == None:
+ if self._EdkBuildOption is None:
self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
return self._EdkBuildOption
## Return the build options specific for EDKII modules in this platform
def _GetEdkIIBuildOptions(self):
- if self._EdkIIBuildOption == None:
+ if self._EdkIIBuildOption is None:
self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
return self._EdkIIBuildOption
@@ -2020,7 +2020,7 @@ class PlatformAutoGen(AutoGen):
# @retval BuildRule object
#
def _GetBuildRule(self):
- if self._BuildRule == None:
+ if self._BuildRule is None:
BuildRuleFile = None
if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
@@ -2040,7 +2040,7 @@ class PlatformAutoGen(AutoGen):
## Summarize the packages used by modules in this platform
def _GetPackageList(self):
- if self._PackageList == None:
+ if self._PackageList is None:
self._PackageList = set()
for La in self.LibraryAutoGenList:
self._PackageList.update(La.DependentPackageList)
@@ -2065,19 +2065,19 @@ class PlatformAutoGen(AutoGen):
## Get list of non-dynamic PCDs
def _GetNonDynamicPcdList(self):
- if self._NonDynamicPcdList == None:
+ if self._NonDynamicPcdList is None:
self.CollectPlatformDynamicPcds()
return self._NonDynamicPcdList
## Get list of dynamic PCDs
def _GetDynamicPcdList(self):
- if self._DynamicPcdList == None:
+ if self._DynamicPcdList is None:
self.CollectPlatformDynamicPcds()
return self._DynamicPcdList
## Generate Token Number for all PCD
def _GetPcdTokenNumbers(self):
- if self._PcdTokenNumber == None:
+ if self._PcdTokenNumber is None:
self._PcdTokenNumber = sdict()
TokenNumber = 1
#
@@ -2145,13 +2145,13 @@ class PlatformAutoGen(AutoGen):
## Summarize ModuleAutoGen objects of all modules to be built for this platform
def _GetModuleAutoGenList(self):
- if self._ModuleAutoGenList == None:
+ if self._ModuleAutoGenList is None:
self._GetAutoGenObjectList()
return self._ModuleAutoGenList
## Summarize ModuleAutoGen objects of all libraries to be built for this platform
def _GetLibraryAutoGenList(self):
- if self._LibraryAutoGenList == None:
+ if self._LibraryAutoGenList is None:
self._GetAutoGenObjectList()
return self._LibraryAutoGenList
@@ -2215,9 +2215,9 @@ class PlatformAutoGen(AutoGen):
LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
else:
LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
LibraryPath = M.LibraryClasses[LibraryClassName]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
"Instance of library class [%s] is not found" % LibraryClassName,
File=self.MetaFile,
@@ -2227,7 +2227,7 @@ class PlatformAutoGen(AutoGen):
# for those forced library instance (NULL library), add a fake library class
if LibraryClassName.startswith("NULL"):
LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
- elif LibraryModule.LibraryClass == None \
+ elif LibraryModule.LibraryClass is None \
or len(LibraryModule.LibraryClass) == 0 \
or (ModuleType != 'USER_DEFINED'
and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
@@ -2243,7 +2243,7 @@ class PlatformAutoGen(AutoGen):
else:
LibraryModule = LibraryInstance[LibraryClassName]
- if LibraryModule == None:
+ if LibraryModule is None:
continue
if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
@@ -2351,7 +2351,7 @@ class PlatformAutoGen(AutoGen):
if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
TokenCName = PcdItem[0]
break
- if FromPcd != None:
+ if FromPcd is not None:
if ToPcd.Pending and FromPcd.Type not in [None, '']:
ToPcd.Type = FromPcd.Type
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
@@ -2395,7 +2395,7 @@ class PlatformAutoGen(AutoGen):
ToPcd.validlists = FromPcd.validlists
ToPcd.expressions = FromPcd.expressions
- if FromPcd != None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
+ if FromPcd is not None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
% (ToPcd.TokenSpaceGuidCName, TokenCName))
Value = ToPcd.DefaultValue
@@ -2441,7 +2441,7 @@ class PlatformAutoGen(AutoGen):
Sku = PcdInModule.SkuInfoList[SkuId]
if Sku.VariableGuid == '': continue
Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
- if Sku.VariableGuidValue == None:
+ if Sku.VariableGuidValue is None:
PackageList = "\n\t".join([str(P) for P in self.PackageList])
EdkLogger.error(
'build',
@@ -2504,12 +2504,12 @@ class PlatformAutoGen(AutoGen):
M = LibraryConsumerList.pop()
for LibraryName in M.Libraries:
Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
- if Library == None:
+ if Library is None:
for Key in self.Platform.LibraryClasses.data.keys():
if LibraryName.upper() == Key.upper():
Library = self.Platform.LibraryClasses[Key, ':dummy:']
break
- if Library == None:
+ if Library is None:
EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
ExtraData="\t%s [%s]" % (str(Module), self.Arch))
continue
@@ -2564,13 +2564,13 @@ class PlatformAutoGen(AutoGen):
# Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
#
if (Key[0] == self.BuildRuleFamily and
- (ModuleStyle == None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
if Target == self.BuildTarget or Target == "*":
if ToolChain == self.ToolChain or ToolChain == "*":
if Arch == self.Arch or Arch == "*":
if Options[Key].startswith("="):
- if OverrideList.get(Key[1]) != None:
+ if OverrideList.get(Key[1]) is not None:
OverrideList.pop(Key[1])
OverrideList[Key[1]] = Options[Key]
@@ -2594,14 +2594,14 @@ class PlatformAutoGen(AutoGen):
if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
- if Options.get((self.BuildRuleFamily, NextKey)) != None:
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:
Options.pop((self.BuildRuleFamily, NextKey))
else:
- if Options.get((self.BuildRuleFamily, NowKey)) != None:
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:
Options.pop((self.BuildRuleFamily, NowKey))
for Key in Options:
- if ModuleStyle != None and len (Key) > 2:
+ if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
# Only append build option for the matched style module.
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
@@ -2638,7 +2638,7 @@ class PlatformAutoGen(AutoGen):
return BuildOptions
for Key in Options:
- if ModuleStyle != None and len (Key) > 2:
+ if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
# Only append build option for the matched style module.
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
@@ -2730,7 +2730,7 @@ class PlatformAutoGen(AutoGen):
BuildOptions[Tool][Attr] += " " + Value
else:
BuildOptions[Tool][Attr] = Value
- if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:
+ if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:
#
# Override UNI flag only for EDK module.
#
@@ -2927,7 +2927,7 @@ class ModuleAutoGen(AutoGen):
# Macros could be used in build_rule.txt (also Makefile)
def _GetMacros(self):
- if self._Macro == None:
+ if self._Macro is None:
self._Macro = sdict()
self._Macro["WORKSPACE" ] = self.WorkspaceDir
self._Macro["MODULE_NAME" ] = self.Name
@@ -2967,7 +2967,7 @@ class ModuleAutoGen(AutoGen):
## Return the module build data object
def _GetModule(self):
- if self._Module == None:
+ if self._Module is None:
self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
return self._Module
@@ -3023,8 +3023,8 @@ class ModuleAutoGen(AutoGen):
## Check if the module is library or not
def _IsLibrary(self):
- if self._LibraryFlag == None:
- if self.Module.LibraryClass != None and self.Module.LibraryClass != []:
+ if self._LibraryFlag is None:
+ if self.Module.LibraryClass is not None and self.Module.LibraryClass != []:
self._LibraryFlag = True
else:
self._LibraryFlag = False
@@ -3036,7 +3036,7 @@ class ModuleAutoGen(AutoGen):
## Return the directory to store intermediate files of the module
def _GetBuildDir(self):
- if self._BuildDir == None:
+ if self._BuildDir is None:
self._BuildDir = path.join(
self.PlatformInfo.BuildDir,
self.Arch,
@@ -3048,15 +3048,15 @@ class ModuleAutoGen(AutoGen):
## Return the directory to store the intermediate object files of the module
def _GetOutputDir(self):
- if self._OutputDir == None:
+ if self._OutputDir is None:
self._OutputDir = path.join(self.BuildDir, "OUTPUT")
CreateDirectory(self._OutputDir)
return self._OutputDir
## Return the directory to store ffs file
def _GetFfsOutputDir(self):
- if self._FfsOutputDir == None:
- if GlobalData.gFdfParser != None:
+ if self._FfsOutputDir is None:
+ if GlobalData.gFdfParser is not None:
self._FfsOutputDir = path.join(self.PlatformInfo.BuildDir, "FV", "Ffs", self.Guid + self.Name)
else:
self._FfsOutputDir = ''
@@ -3064,21 +3064,21 @@ class ModuleAutoGen(AutoGen):
## Return the directory to store auto-gened source files of the module
def _GetDebugDir(self):
- if self._DebugDir == None:
+ if self._DebugDir is None:
self._DebugDir = path.join(self.BuildDir, "DEBUG")
CreateDirectory(self._DebugDir)
return self._DebugDir
## Return the path of custom file
def _GetCustomMakefile(self):
- if self._CustomMakefile == None:
+ if self._CustomMakefile is None:
self._CustomMakefile = {}
for Type in self.Module.CustomMakefile:
if Type in gMakeTypeMap:
MakeType = gMakeTypeMap[Type]
else:
MakeType = 'nmake'
- if self.SourceOverrideDir != None:
+ if self.SourceOverrideDir is not None:
File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])
if not os.path.exists(File):
File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
@@ -3179,7 +3179,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The token list of the dependency expression after parsed
#
def _GetDepexTokenList(self):
- if self._DepexList == None:
+ if self._DepexList is None:
self._DepexList = {}
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
return self._DepexList
@@ -3215,7 +3215,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The token list of the dependency expression after parsed
#
def _GetDepexExpressionTokenList(self):
- if self._DepexExpressionList == None:
+ if self._DepexExpressionList is None:
self._DepexExpressionList = {}
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
return self._DepexExpressionList
@@ -3283,7 +3283,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The dict containing valid options
#
def _GetModuleBuildOption(self):
- if self._BuildOption == None:
+ if self._BuildOption is None:
self._BuildOption, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
if self.BuildRuleOrder:
self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
@@ -3294,7 +3294,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The include path list
#
def _GetBuildOptionIncPathList(self):
- if self._BuildOptionIncPathList == None:
+ if self._BuildOptionIncPathList is None:
#
# Regular expression for finding Include Directories; the difference between MSFT and INTEL/GCC/RVCT
# is that the former uses /I while the latter uses -I to specify include directories
@@ -3355,7 +3355,7 @@ class ModuleAutoGen(AutoGen):
# $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
#
def _GetSourceFileList(self):
- if self._SourceFileList == None:
+ if self._SourceFileList is None:
self._SourceFileList = []
for F in self.Module.Sources:
# match tool chain
@@ -3408,7 +3408,7 @@ class ModuleAutoGen(AutoGen):
## Return the list of unicode files
def _GetUnicodeFileList(self):
- if self._UnicodeFileList == None:
+ if self._UnicodeFileList is None:
if TAB_UNICODE_FILE in self.FileTypes:
self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]
else:
@@ -3417,7 +3417,7 @@ class ModuleAutoGen(AutoGen):
## Return the list of vfr files
def _GetVfrFileList(self):
- if self._VfrFileList == None:
+ if self._VfrFileList is None:
if TAB_VFR_FILE in self.FileTypes:
self._VfrFileList = self.FileTypes[TAB_VFR_FILE]
else:
@@ -3426,7 +3426,7 @@ class ModuleAutoGen(AutoGen):
## Return the list of Image Definition files
def _GetIdfFileList(self):
- if self._IdfFileList == None:
+ if self._IdfFileList is None:
if TAB_IMAGE_FILE in self.FileTypes:
self._IdfFileList = self.FileTypes[TAB_IMAGE_FILE]
else:
@@ -3440,7 +3440,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list of files which can be built later
#
def _GetBinaryFiles(self):
- if self._BinaryFileList == None:
+ if self._BinaryFileList is None:
self._BinaryFileList = []
for F in self.Module.Binaries:
if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:
@@ -3450,7 +3450,7 @@ class ModuleAutoGen(AutoGen):
return self._BinaryFileList
def _GetBuildRules(self):
- if self._BuildRules == None:
+ if self._BuildRules is None:
BuildRules = {}
BuildRuleDatabase = self.PlatformInfo.BuildRule
for Type in BuildRuleDatabase.FileTypeList:
@@ -3477,7 +3477,7 @@ class ModuleAutoGen(AutoGen):
return self._BuildRules
def _ApplyBuildRule(self, File, FileType):
- if self._BuildTargets == None:
+ if self._BuildTargets is None:
self._IntroBuildTargetList = set()
self._FinalBuildTargetList = set()
self._BuildTargets = {}
@@ -3502,7 +3502,7 @@ class ModuleAutoGen(AutoGen):
if Source != File:
CreateDirectory(Source.Dir)
- if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:
+ if File.IsBinary and File == Source and self._BinaryFileList is not None and File in self._BinaryFileList:
# Skip all files that are not binary libraries
if not self.IsLibrary:
continue
@@ -3554,7 +3554,7 @@ class ModuleAutoGen(AutoGen):
FileType = TAB_UNKNOWN_FILE
def _GetTargets(self):
- if self._BuildTargets == None:
+ if self._BuildTargets is None:
self._IntroBuildTargetList = set()
self._FinalBuildTargetList = set()
self._BuildTargets = {}
@@ -3601,7 +3601,7 @@ class ModuleAutoGen(AutoGen):
if self.BuildType == 'UEFI_HII':
UniStringAutoGenC = False
IdfStringAutoGenC = False
- if self._AutoGenFileList == None:
+ if self._AutoGenFileList is None:
self._AutoGenFileList = {}
AutoGenC = TemplateString()
AutoGenH = TemplateString()
@@ -3624,29 +3624,29 @@ class ModuleAutoGen(AutoGen):
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
self._AutoGenFileList[AutoFile] = str(StringH)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer != None and UniStringBinBuffer.getvalue() != "":
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
self._AutoGenFileList[AutoFile] = UniStringBinBuffer.getvalue()
AutoFile.IsBinary = True
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer != None:
+ if UniStringBinBuffer is not None:
UniStringBinBuffer.close()
if str(StringIdf) != "":
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
self._AutoGenFileList[AutoFile] = str(StringIdf)
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer != None and IdfGenBinBuffer.getvalue() != "":
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
self._AutoGenFileList[AutoFile] = IdfGenBinBuffer.getvalue()
AutoFile.IsBinary = True
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer != None:
+ if IdfGenBinBuffer is not None:
IdfGenBinBuffer.close()
return self._AutoGenFileList
## Return the list of library modules explicitly or implicitly used by this module
def _GetLibraryList(self):
- if self._DependentLibraryList == None:
+ if self._DependentLibraryList is None:
# only merge library classes and PCD for non-library module
if self.IsLibrary:
self._DependentLibraryList = []
@@ -3668,7 +3668,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list of PCD
#
def _GetModulePcdList(self):
- if self._ModulePcdList == None:
+ if self._ModulePcdList is None:
# apply PCD settings from platform
self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
self.UpdateComments(self._PcdComments, self.Module.PcdComments)
@@ -3679,7 +3679,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list of PCD
#
def _GetLibraryPcdList(self):
- if self._LibraryPcdList == None:
+ if self._LibraryPcdList is None:
Pcds = sdict()
if not self.IsLibrary:
# get PCDs from dependent libraries
@@ -3701,7 +3701,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The mapping between GUID cname and its value
#
def _GetGuidList(self):
- if self._GuidList == None:
+ if self._GuidList is None:
self._GuidList = sdict()
self._GuidList.update(self.Module.Guids)
for Library in self.DependentLibraryList:
@@ -3711,7 +3711,7 @@ class ModuleAutoGen(AutoGen):
return self._GuidList
def GetGuidsUsedByPcd(self):
- if self._GuidsUsedByPcd == None:
+ if self._GuidsUsedByPcd is None:
self._GuidsUsedByPcd = sdict()
self._GuidsUsedByPcd.update(self.Module.GetGuidsUsedByPcd())
for Library in self.DependentLibraryList:
@@ -3722,7 +3722,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The mapping between protocol cname and its value
#
def _GetProtocolList(self):
- if self._ProtocolList == None:
+ if self._ProtocolList is None:
self._ProtocolList = sdict()
self._ProtocolList.update(self.Module.Protocols)
for Library in self.DependentLibraryList:
@@ -3736,7 +3736,7 @@ class ModuleAutoGen(AutoGen):
# @retval dict The mapping between PPI cname and its value
#
def _GetPpiList(self):
- if self._PpiList == None:
+ if self._PpiList is None:
self._PpiList = sdict()
self._PpiList.update(self.Module.Ppis)
for Library in self.DependentLibraryList:
@@ -3750,7 +3750,7 @@ class ModuleAutoGen(AutoGen):
# @retval list The list path
#
def _GetIncludePathList(self):
- if self._IncludePathList == None:
+ if self._IncludePathList is None:
self._IncludePathList = []
if self.AutoGenVersion < 0x00010005:
for Inc in self.Module.Includes:
@@ -3942,7 +3942,7 @@ class ModuleAutoGen(AutoGen):
return
# Skip the following code for modules with no source files
- if self.SourceFileList == None or self.SourceFileList == []:
+ if self.SourceFileList is None or self.SourceFileList == []:
return
# Skip the following code for modules without any binary files
@@ -4157,7 +4157,7 @@ class ModuleAutoGen(AutoGen):
HexFormat = '0x%016x'
PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
else:
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
)
@@ -4437,7 +4437,7 @@ class ModuleAutoGen(AutoGen):
## Summarize the ModuleAutoGen objects of all libraries used by this module
def _GetLibraryAutoGenList(self):
- if self._LibraryAutoGenList == None:
+ if self._LibraryAutoGenList is None:
self._LibraryAutoGenList = []
for Library in self.DependentLibraryList:
La = ModuleAutoGen(
@@ -4525,7 +4525,7 @@ class ModuleAutoGen(AutoGen):
return True
def GetTimeStampPath(self):
- if self._TimeStampPath == None:
+ if self._TimeStampPath is None:
self._TimeStampPath = os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')
return self._TimeStampPath
def CreateTimeStamp(self, Makefile):
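
For background on why the substitution is preferred: "== None" is an equality test that dispatches to the left operand's __eq__ method, while "is None" is an identity test against the None singleton, which is the form PEP 8 recommends for singleton comparisons. Classes that override __eq__ (the PcdClassObject.__eq__ hunk later in this patch is one such override) can make an equality comparison against None do extra work or return a surprising result; an identity check cannot. A minimal sketch of the hazard, using a hypothetical AlwaysEqual class rather than any BaseTools code:

class AlwaysEqual(object):
    # Hypothetical object whose __eq__ answers True for any operand,
    # e.g. a wildcard/sentinel value.
    def __eq__(self, Other):
        return True

Wildcard = AlwaysEqual()
print Wildcard == None   # True  -- answered by AlwaysEqual.__eq__
print Wildcard is None   # False -- identity test; Wildcard is not the None singleton

For the objects touched by this patch the two spellings are expected to agree, so the change reads as a style and robustness cleanup rather than a behavior change.
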
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
index 63ed47d94bcb..0daed7da610d 100644
--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
+++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -346,12 +346,12 @@ class BuildRule:
def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]):
self.RuleFile = File
# Read build rules from file if it's not none
- if File != None:
+ if File is not None:
try:
self.RuleContent = open(File, 'r').readlines()
except:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
- elif Content != None:
+ elif Content is not None:
self.RuleContent = Content
else:
EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
@@ -478,7 +478,7 @@ class BuildRule:
EdkLogger.error("build", FORMAT_INVALID, "No file type given",
File=self.RuleFile, Line=LineIndex + 1,
ExtraData=self.RuleContent[LineIndex])
- if self._FileTypePattern.match(FileType) == None:
+ if self._FileTypePattern.match(FileType) is None:
EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
# new format: File-Type.Build-Type.Arch
@@ -561,7 +561,7 @@ class BuildRule:
FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
for ToolChainFamily in self._FamilyList:
InputFiles = self._RuleInfo[ToolChainFamily, self._State]
- if InputFiles == None:
+ if InputFiles is None:
InputFiles = []
self._RuleInfo[ToolChainFamily, self._State] = InputFiles
InputFiles.extend(FileList)
@@ -573,7 +573,7 @@ class BuildRule:
def ParseCommon(self, LineIndex):
for ToolChainFamily in self._FamilyList:
Items = self._RuleInfo[ToolChainFamily, self._State]
- if Items == None:
+ if Items is None:
Items = []
self._RuleInfo[ToolChainFamily, self._State] = Items
Items.append(self.RuleContent[LineIndex])
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
index 481c4dda1447..cca6c8ab4fb5 100644
--- a/BaseTools/Source/Python/AutoGen/GenC.py
+++ b/BaseTools/Source/Python/AutoGen/GenC.py
@@ -1085,7 +1085,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if not Value.endswith('U'):
Value += 'U'
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN']:
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
@@ -1122,7 +1122,7 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN', 'VOID*']:
# handle structure PCD
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
diff --git a/BaseTools/Source/Python/AutoGen/GenDepex.py b/BaseTools/Source/Python/AutoGen/GenDepex.py
index 7aa22bd944a0..9acea8f6bfed 100644
--- a/BaseTools/Source/Python/AutoGen/GenDepex.py
+++ b/BaseTools/Source/Python/AutoGen/GenDepex.py
@@ -360,7 +360,7 @@ class DependencyExpression:
FilePath = ""
FileChangeFlag = True
- if File == None:
+ if File is None:
sys.stdout.write(Buffer.getvalue())
FilePath = "STDOUT"
else:
@@ -414,13 +414,13 @@ def Main():
EdkLogger.SetLevel(EdkLogger.QUIET)
elif Option.verbose:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
try:
- if Option.ModuleType == None or Option.ModuleType not in gType2Phase:
+ if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")
DxsFile = ''
@@ -437,7 +437,7 @@ def Main():
EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
- if Option.OutputFile != None:
+ if Option.OutputFile is not None:
FileChangeFlag = Dpx.Generate(Option.OutputFile)
if not FileChangeFlag and DxsFile:
#
@@ -450,7 +450,7 @@ def Main():
Dpx.Generate()
except BaseException, X:
EdkLogger.quiet("")
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet(traceback.format_exc())
else:
EdkLogger.quiet(str(X))
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index 60bd625cd2b1..dcdfcca1a5b0 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -906,12 +906,12 @@ cleanlib:
# skip non-C files
if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c":
continue
- elif DepSet == None:
+ elif DepSet is None:
DepSet = set(self.FileDependency[File])
else:
DepSet &= set(self.FileDependency[File])
# in case nothing in SourceFileList
- if DepSet == None:
+ if DepSet is None:
DepSet = set()
#
# Extract common files list in the dependency files
@@ -1516,7 +1516,7 @@ class TopLevelMakefile(BuildFile):
# TRICK: for not generating GenFds call in makefile if no FDF file
MacroList = []
- if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "":
+ if PlatformInfo.FdfFile is not None and PlatformInfo.FdfFile != "":
FdfFileList = [PlatformInfo.FdfFile]
# macros passed to GenFds
MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource.replace('\\', '\\\\')))
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
index e4d7f3b759a9..a2c4fb39ec85 100644
--- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py
+++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -1234,7 +1234,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
- if SkuId == None or SkuId == '':
+ if SkuId is None or SkuId == '':
continue
diff --git a/BaseTools/Source/Python/AutoGen/IdfClassObject.py b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
index d6d4703370aa..cb72219b40d5 100644
--- a/BaseTools/Source/Python/AutoGen/IdfClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
@@ -76,7 +76,7 @@ class IdfFileClassObject(object):
self.LoadIdfFile(File)
def LoadIdfFile(self, File = None):
- if File == None:
+ if File is None:
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
self.File = File
@@ -106,7 +106,7 @@ class IdfFileClassObject(object):
if Len == 4 and LineDetails[2] != 'TRANSPARENT':
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', LineDetails[1], re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(LineDetails[1]):
+ if MatchString is None or MatchString.end(0) != len(LineDetails[1]):
EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains the invalid character.' % (LineDetails[1], File.Path))
if LineDetails[1] not in self.ImageIDList:
self.ImageIDList.append(LineDetails[1])
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
index 9c7dd1e40374..73af1214eb0a 100644
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ b/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -150,7 +150,7 @@ def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
Name = StringItem.StringName
Token = StringItem.Token
Referenced = StringItem.Referenced
- if Name != None:
+ if Name is not None:
Line = ''
if Referenced == True:
if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
@@ -478,11 +478,11 @@ def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
# @retval FileList: A list of all files found
#
def GetFileList(SourceFileList, IncludeList, SkipList):
- if IncludeList == None:
+ if IncludeList is None:
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
FileList = []
- if SkipList == None:
+ if SkipList is None:
SkipList = []
for File in SourceFileList:
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index 856d19cda270..27644815dd38 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -123,7 +123,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if IsCompatibleMode:
if length == 3 and LangName.isalpha():
TempLangName = LangConvTable.get(LangName.lower())
- if TempLangName != None:
+ if TempLangName is not None:
return TempLangName
return LangName
else:
@@ -135,7 +135,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if LangName.isalpha():
return LangName
elif length == 3:
- if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:
+ if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
return LangName
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
@@ -143,7 +143,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
elif length >= 6:
if LangName[0:2].isalpha() and LangName[2] == '-':
return LangName
- if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':
+ if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
return LangName
EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
@@ -194,14 +194,14 @@ class StringDefClassObject(object):
self.UseOtherLangDef = UseOtherLangDef
self.Length = 0
- if Name != None:
+ if Name is not None:
self.StringName = Name
self.StringNameByteList = UniToHexList(Name)
- if Value != None:
+ if Value is not None:
self.StringValue = Value + u'\x00' # Add a NULL at string tail
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
- if Token != None:
+ if Token is not None:
self.Token = Token
def __str__(self):
@@ -212,7 +212,7 @@ class StringDefClassObject(object):
repr(self.UseOtherLangDef)
def UpdateValue(self, Value = None):
- if Value != None:
+ if Value is not None:
self.StringValue = Value + u'\x00' # Add a NULL at string tail
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
@@ -351,7 +351,7 @@ class UniFileClassObject(object):
# Check the string name
if Name != '':
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
+ if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
LanguageList = Item.split(u'#language ')
for IndexI in range(len(LanguageList)):
@@ -465,7 +465,7 @@ class UniFileClassObject(object):
# Load a .uni file
#
def LoadUniFile(self, File = None):
- if File == None:
+ if File is None:
EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
self.File = File
#
@@ -521,7 +521,7 @@ class UniFileClassObject(object):
# Check the string name
if not self.IsCompatibleMode and Name != '':
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
+ if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
self.AddStringToList(Name, Language, Value)
continue
@@ -577,7 +577,7 @@ class UniFileClassObject(object):
IsAdded = True
if Name in self.OrderedStringDict[Language]:
IsAdded = False
- if Value != None:
+ if Value is not None:
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
diff --git a/BaseTools/Source/Python/BPDG/BPDG.py b/BaseTools/Source/Python/BPDG/BPDG.py
index b1e328ff3f11..6c8f89f5d12b 100644
--- a/BaseTools/Source/Python/BPDG/BPDG.py
+++ b/BaseTools/Source/Python/BPDG/BPDG.py
@@ -57,21 +57,21 @@ def main():
EdkLogger.SetLevel(EdkLogger.VERBOSE)
elif Options.opt_quiet:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Options.debug_level != None:
+ elif Options.debug_level is not None:
EdkLogger.SetLevel(Options.debug_level + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if Options.bin_filename == None:
+ if Options.bin_filename is None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
- if Options.filename == None:
+ if Options.filename is None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
Force = False
- if Options.opt_force != None:
+ if Options.opt_force is not None:
Force = True
- if (Args[0] != None) :
+ if (Args[0] is not None) :
StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
else :
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
diff --git a/BaseTools/Source/Python/BPDG/GenVpd.py b/BaseTools/Source/Python/BPDG/GenVpd.py
index cdfc420c66f7..887240e94cb4 100644
--- a/BaseTools/Source/Python/BPDG/GenVpd.py
+++ b/BaseTools/Source/Python/BPDG/GenVpd.py
@@ -381,7 +381,7 @@ class GenVPD :
# Delete useless lines
while (True) :
try :
- if (self.FileLinesList[count] == None) :
+ if (self.FileLinesList[count] is None) :
del(self.FileLinesList[count])
else :
count += 1
@@ -398,7 +398,7 @@ class GenVPD :
# Process the pcds one by one based on the pcd's value and size
count = 0
for line in self.FileLinesList:
- if line != None :
+ if line is not None :
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName)
# Strip the space char
PCD.PcdCName = PCD.PcdCName.strip(' ')
diff --git a/BaseTools/Source/Python/Common/DecClassObject.py b/BaseTools/Source/Python/Common/DecClassObject.py
index d7c70a7336a0..835dbd5935d2 100644
--- a/BaseTools/Source/Python/Common/DecClassObject.py
+++ b/BaseTools/Source/Python/Common/DecClassObject.py
@@ -116,7 +116,7 @@ class Dec(DecObject):
#
# Load Dec file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadDecFile(Filename)
#
diff --git a/BaseTools/Source/Python/Common/Dictionary.py b/BaseTools/Source/Python/Common/Dictionary.py
index 1c33fefabf98..f653275ff13f 100644
--- a/BaseTools/Source/Python/Common/Dictionary.py
+++ b/BaseTools/Source/Python/Common/Dictionary.py
@@ -54,7 +54,7 @@ def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplit
# @param Dict: The dictionary to be printed
#
def printDict(Dict):
- if Dict != None:
+ if Dict is not None:
KeyList = Dict.keys()
for Key in KeyList:
if Dict[Key] != '':
diff --git a/BaseTools/Source/Python/Common/DscClassObject.py b/BaseTools/Source/Python/Common/DscClassObject.py
index c2fa1c275a2d..b98dbf57229b 100644
--- a/BaseTools/Source/Python/Common/DscClassObject.py
+++ b/BaseTools/Source/Python/Common/DscClassObject.py
@@ -128,7 +128,7 @@ class Dsc(DscObject):
#
# Load Dsc file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadDscFile(Filename)
#
@@ -902,7 +902,7 @@ class Dsc(DscObject):
#
def GenSkuInfoList(self, SkuNameList, SkuInfo, VariableName='', VariableGuid='', VariableOffset='', HiiDefaultValue='', VpdOffset='', DefaultValue=''):
SkuNameList = GetSplitValueList(SkuNameList)
- if SkuNameList == None or SkuNameList == [] or SkuNameList == ['']:
+ if SkuNameList is None or SkuNameList == [] or SkuNameList == ['']:
SkuNameList = ['DEFAULT']
SkuInfoList = {}
for Item in SkuNameList:
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspace.py b/BaseTools/Source/Python/Common/EdkIIWorkspace.py
index f22a545b77ce..c14b4eb52d50 100644
--- a/BaseTools/Source/Python/Common/EdkIIWorkspace.py
+++ b/BaseTools/Source/Python/Common/EdkIIWorkspace.py
@@ -38,7 +38,7 @@ class EdkIIWorkspace:
#
# Check environment variable 'WORKSPACE'
#
- if os.environ.get('WORKSPACE') == None:
+ if os.environ.get('WORKSPACE') is None:
print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'
return False
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
index d6df01d4ce06..c0966d526519 100644
--- a/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
+++ b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
@@ -93,7 +93,7 @@ class PcdClassObject(object):
# @retval True The two pcds are the same
#
def __eq__(self, Other):
- return Other != None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
+ return Other is not None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
## Override __hash__ function
#
@@ -121,7 +121,7 @@ class LibraryClassObject(object):
def __init__(self, Name = None, SupModList = [], Type = None):
self.LibraryClass = Name
self.SupModList = SupModList
- if Type != None:
+ if Type is not None:
self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
## ModuleBuildClassObject
@@ -864,7 +864,7 @@ class WorkspaceBuild(object):
for Libs in Pb.LibraryClass:
for Type in Libs.SupModList:
Instance = self.FindLibraryClassInstanceOfLibrary(Lib, Arch, Type)
- if Instance == None:
+ if Instance is None:
Instance = RecommendedInstance
Pb.LibraryClasses[(Lib, Type)] = Instance
else:
@@ -872,7 +872,7 @@ class WorkspaceBuild(object):
# For Module
#
Instance = self.FindLibraryClassInstanceOfModule(Lib, Arch, Pb.ModuleType, Inf)
- if Instance == None:
+ if Instance is None:
Instance = RecommendedInstance
Pb.LibraryClasses[(Lib, Pb.ModuleType)] = Instance
@@ -912,7 +912,7 @@ class WorkspaceBuild(object):
if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList):
continue
Module = self.Build[Arch].ModuleDatabase[Inf]
- if Module.LibraryClass == None or Module.LibraryClass == []:
+ if Module.LibraryClass is None or Module.LibraryClass == []:
self.UpdateLibrariesOfModule(Platform, Module, Arch)
for Key in Module.LibraryClasses:
Lib = Module.LibraryClasses[Key]
@@ -969,15 +969,15 @@ class WorkspaceBuild(object):
continue
LibraryClassName = Key[0]
- if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] == None:
- if LibraryPath == None or LibraryPath == "":
+ if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] is None:
+ if LibraryPath is None or LibraryPath == "":
LibraryInstance[LibraryClassName] = None
continue
LibraryModule = ModuleDatabase[LibraryPath]
LibraryInstance[LibraryClassName] = LibraryModule
LibraryConsumerList.append(LibraryModule)
EdkLogger.verbose("\t" + LibraryClassName + " : " + str(LibraryModule))
- elif LibraryPath == None or LibraryPath == "":
+ elif LibraryPath is None or LibraryPath == "":
continue
else:
LibraryModule = LibraryInstance[LibraryClassName]
@@ -1002,7 +1002,7 @@ class WorkspaceBuild(object):
Q = []
for LibraryClassName in LibraryInstance:
M = LibraryInstance[LibraryClassName]
- if M == None:
+ if M is None:
EdkLogger.error("AutoGen", AUTOGEN_ERROR,
"Library instance for library class [%s] is not found" % LibraryClassName,
ExtraData="\t%s [%s]" % (str(Module), Arch))
@@ -1011,7 +1011,7 @@ class WorkspaceBuild(object):
# check if there're duplicate library classes
#
for Lc in M.LibraryClass:
- if Lc.SupModList != None and ModuleType not in Lc.SupModList:
+ if Lc.SupModList is not None and ModuleType not in Lc.SupModList:
EdkLogger.error("AutoGen", AUTOGEN_ERROR,
"Module type [%s] is not supported by library instance [%s]" % (ModuleType, str(M)),
ExtraData="\t%s" % str(Module))
@@ -1380,7 +1380,7 @@ class WorkspaceBuild(object):
if (Name, Guid) in Pcds:
OwnerPlatform = Dsc
Pcd = Pcds[(Name, Guid)]
- if Pcd.Type != '' and Pcd.Type != None:
+ if Pcd.Type != '' and Pcd.Type is not None:
NewType = Pcd.Type
if NewType in DataType.PCD_DYNAMIC_TYPE_LIST:
NewType = DataType.TAB_PCDS_DYNAMIC
@@ -1396,13 +1396,13 @@ class WorkspaceBuild(object):
EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)
- if Pcd.DatumType != '' and Pcd.DatumType != None:
+ if Pcd.DatumType != '' and Pcd.DatumType is not None:
DatumType = Pcd.DatumType
- if Pcd.TokenValue != '' and Pcd.TokenValue != None:
+ if Pcd.TokenValue != '' and Pcd.TokenValue is not None:
Token = Pcd.TokenValue
- if Pcd.DefaultValue != '' and Pcd.DefaultValue != None:
+ if Pcd.DefaultValue != '' and Pcd.DefaultValue is not None:
Value = Pcd.DefaultValue
- if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize != None:
+ if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize is not None:
MaxDatumSize = Pcd.MaxDatumSize
SkuInfoList = Pcd.SkuInfoList
diff --git a/BaseTools/Source/Python/Common/EdkLogger.py b/BaseTools/Source/Python/Common/EdkLogger.py
index ac1c8edc4fe2..3f462df49ada 100644
--- a/BaseTools/Source/Python/Common/EdkLogger.py
+++ b/BaseTools/Source/Python/Common/EdkLogger.py
@@ -89,7 +89,7 @@ def debug(Level, Message, ExtraData=None):
"msg" : Message,
}
- if ExtraData != None:
+ if ExtraData is not None:
LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData
else:
LogText = _DebugMessageTemplate % TemplateDict
@@ -119,10 +119,10 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
return
# if no tool name given, use caller's source file name as tool name
- if ToolName == None or ToolName == "":
+ if ToolName is None or ToolName == "":
ToolName = os.path.basename(traceback.extract_stack()[-2][0])
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
@@ -134,12 +134,12 @@ def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
"msg" : Message,
}
- if File != None:
+ if File is not None:
LogText = _WarningMessageTemplate % TemplateDict
else:
LogText = _WarningMessageTemplateWithoutFile % TemplateDict
- if ExtraData != None:
+ if ExtraData is not None:
LogText += "\n %s" % ExtraData
_InfoLogger.log(WARN, LogText)
@@ -168,18 +168,18 @@ info = _InfoLogger.info
# it's True. This is the default behavior.
#
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
- if Message == None:
+ if Message is None:
if ErrorCode in gErrorMessage:
Message = gErrorMessage[ErrorCode]
else:
Message = gErrorMessage[UNKNOWN_ERROR]
- if ExtraData == None:
+ if ExtraData is None:
ExtraData = ""
TemplateDict = {
@@ -191,7 +191,7 @@ def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=Non
"extra" : ExtraData
}
- if File != None:
+ if File is not None:
LogText = _ErrorMessageTemplate % TemplateDict
else:
LogText = _ErrorMessageTemplateWithoutFile % TemplateDict
diff --git a/BaseTools/Source/Python/Common/FdfClassObject.py b/BaseTools/Source/Python/Common/FdfClassObject.py
index 3e7d44954c88..3d37800d9ab7 100644
--- a/BaseTools/Source/Python/Common/FdfClassObject.py
+++ b/BaseTools/Source/Python/Common/FdfClassObject.py
@@ -51,7 +51,7 @@ class Fdf(FdfObject):
#
# Load Fdf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadFdfFile(Filename)
#
diff --git a/BaseTools/Source/Python/Common/FdfParserLite.py b/BaseTools/Source/Python/Common/FdfParserLite.py
index df287414db6f..496241a7b217 100644
--- a/BaseTools/Source/Python/Common/FdfParserLite.py
+++ b/BaseTools/Source/Python/Common/FdfParserLite.py
@@ -353,7 +353,7 @@ class FdfParser(object):
if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:
Value = Profile.MacroValue
- if Value != None:
+ if Value is not None:
Str = Str.replace('$(' + Name + ')', Value)
MacroEnd = MacroStart + len(Value)
@@ -676,8 +676,8 @@ class FdfParser(object):
FileLineTuple = GetRealFileLine(self.FileName, Line)
if Name in InputMacroDict:
MacroValue = InputMacroDict[Name]
- if Op == None:
- if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':
+ if Op is None:
+ if Value == 'Bool' and MacroValue is None or MacroValue.upper() == 'FALSE':
return False
return True
elif Op == '!=':
@@ -691,7 +691,7 @@ class FdfParser(object):
else:
return False
else:
- if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue is not None and MacroValue.isdigit())):
InputVal = long(Value, 0)
MacroVal = long(MacroValue, 0)
if Op == '>':
@@ -721,8 +721,8 @@ class FdfParser(object):
for Profile in AllMacroList:
if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
- if Op == None:
- if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
+ if Op is None:
+ if Value == 'Bool' and Profile.MacroValue is None or Profile.MacroValue.upper() == 'FALSE':
return False
return True
elif Op == '!=':
@@ -736,7 +736,7 @@ class FdfParser(object):
else:
return False
else:
- if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue is not None and Profile.MacroValue.isdigit())):
InputVal = long(Value, 0)
MacroVal = long(Profile.MacroValue, 0)
if Op == '>':
@@ -932,7 +932,7 @@ class FdfParser(object):
if not self.__GetNextToken():
return False
- if RangeExpression.RegGuidPattern.match(self.__Token) != None:
+ if RangeExpression.RegGuidPattern.match(self.__Token) is not None:
return True
else:
self.__UndoToken()
@@ -1451,7 +1451,7 @@ class FdfParser(object):
pass
for Item in Obj.BlockSizeList:
- if Item[0] == None or Item[1] == None:
+ if Item[0] is None or Item[1] is None:
raise Warning("expected block statement for Fd Section", self.FileName, self.CurrentLineNumber)
return True
@@ -2420,7 +2420,7 @@ class FdfParser(object):
FvImageSectionObj = CommonDataClass.FdfClass.FvImageSectionClassObject()
FvImageSectionObj.Alignment = AlignValue
- if FvObj != None:
+ if FvObj is not None:
FvImageSectionObj.Fv = FvObj
FvImageSectionObj.FvName = None
else:
@@ -2940,7 +2940,7 @@ class FdfParser(object):
Rule.CheckSum = CheckSum
Rule.Fixed = Fixed
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
while True:
@@ -2967,7 +2967,7 @@ class FdfParser(object):
Rule.Fixed = Fixed
Rule.FileExtension = Ext
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
return Rule
@@ -3010,7 +3010,7 @@ class FdfParser(object):
Rule.Fixed = Fixed
Rule.FileName = self.__Token
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
return Rule
@@ -3147,7 +3147,7 @@ class FdfParser(object):
EfiSectionObj.KeepReloc = False
else:
EfiSectionObj.KeepReloc = True
- if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
raise Warning("Section type %s has reloc strip flag conflict with Rule At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
else:
raise Warning("Section type %s could not have reloc strip flag At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
@@ -3469,7 +3469,7 @@ class FdfParser(object):
raise Warning("expected Component version At Line ", self.FileName, self.CurrentLineNumber)
Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')
- if Pattern.match(self.__Token) == None:
+ if Pattern.match(self.__Token) is None:
raise Warning("Unknown version format At line ", self.FileName, self.CurrentLineNumber)
CompStatementObj.CompVer = self.__Token
@@ -3542,7 +3542,7 @@ class FdfParser(object):
for elementRegion in FdObj.RegionList:
if elementRegion.RegionType == 'FV':
for elementRegionData in elementRegion.RegionDataList:
- if elementRegionData != None and elementRegionData.upper() not in FvList:
+ if elementRegionData is not None and elementRegionData.upper() not in FvList:
FvList.append(elementRegionData.upper())
return FvList
@@ -3559,9 +3559,9 @@ class FdfParser(object):
for FfsObj in FvObj.FfsList:
if isinstance(FfsObj, FfsFileStatement.FileStatement):
- if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
RefFvList.append(FfsObj.FvName.upper())
- elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
RefFdList.append(FfsObj.FdName.upper())
else:
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
@@ -3582,9 +3582,9 @@ class FdfParser(object):
while SectionStack != []:
SectionObj = SectionStack.pop()
if isinstance(SectionObj, FvImageSection.FvImageSection):
- if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
FvList.append(SectionObj.FvName.upper())
- if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
FvList.append(SectionObj.Fv.UiFvName.upper())
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
diff --git a/BaseTools/Source/Python/Common/InfClassObject.py b/BaseTools/Source/Python/Common/InfClassObject.py
index f24e4e41a0c1..ba43eb548471 100644
--- a/BaseTools/Source/Python/Common/InfClassObject.py
+++ b/BaseTools/Source/Python/Common/InfClassObject.py
@@ -199,7 +199,7 @@ class Inf(InfObject):
#
# Load Inf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.LoadInfFile(Filename)
#
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index 7d44fdcf8ba7..1e6e61026474 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -85,7 +85,7 @@ def _parseForXcode(lines, efifilepath, varnames):
for varname in varnames:
if varname in line:
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line)
- if m != None:
+ if m is not None:
ret.append((varname, m.group(1)))
return ret
@@ -110,27 +110,27 @@ def _parseForGCC(lines, efifilepath, varnames):
# status handler
if status == 3:
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)
- if m != None:
+ if m is not None:
sections.append(m.groups(0))
for varname in varnames:
Str = ''
m = re.match("^.data.(%s)" % varname, line)
- if m != None:
+ if m is not None:
m = re.match(".data.(%s)$" % varname, line)
- if m != None:
+ if m is not None:
Str = lines[index + 1]
else:
Str = line[len(".data.%s" % varname):]
if Str:
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip())
- if m != None:
+ if m is not None:
varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))
if not varoffset:
return []
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return []
#redirection
redirection = 0
@@ -166,19 +166,19 @@ def _parseGeneral(lines, efifilepath, varnames):
continue
if status == 1 and len(line) != 0:
m = secRe.match(line)
- assert m != None, "Fail to parse the section in map file , line is %s" % line
+ assert m is not None, "Fail to parse the section in map file , line is %s" % line
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
if status == 2 and len(line) != 0:
for varname in varnames:
m = symRe.match(line)
- assert m != None, "Fail to parse the symbol in map file, line is %s" % line
+ assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
sec_no = int(sec_no, 16)
sym_offset = int(sym_offset, 16)
vir_addr = int(vir_addr, 16)
m2 = re.match('^[_]*(%s)' % varname, sym_name)
- if m2 != None:
+ if m2 is not None:
# found a binary pcd entry in map file
for sec in secs:
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
@@ -188,7 +188,7 @@ def _parseGeneral(lines, efifilepath, varnames):
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return []
ret = []
@@ -423,7 +423,7 @@ def GuidStructureStringToGuidValueName(GuidValue):
# @param Directory The directory name
#
def CreateDirectory(Directory):
- if Directory == None or Directory.strip() == "":
+ if Directory is None or Directory.strip() == "":
return True
try:
if not os.access(Directory, os.F_OK):
@@ -437,7 +437,7 @@ def CreateDirectory(Directory):
# @param Directory The directory name
#
def RemoveDirectory(Directory, Recursively=False):
- if Directory == None or Directory.strip() == "" or not os.path.exists(Directory):
+ if Directory is None or Directory.strip() == "" or not os.path.exists(Directory):
return
if Recursively:
CurrentDirectory = os.getcwd()
@@ -540,7 +540,7 @@ def DataDump(Data, File):
except:
EdkLogger.error("", FILE_OPEN_FAILURE, ExtraData=File, RaiseError=False)
finally:
- if Fd != None:
+ if Fd is not None:
Fd.close()
## Restore a Python object from a file
@@ -560,7 +560,7 @@ def DataRestore(File):
EdkLogger.verbose("Failed to load [%s]\n\t%s" % (File, str(e)))
Data = None
finally:
- if Fd != None:
+ if Fd is not None:
Fd.close()
return Data
@@ -668,7 +668,7 @@ def GetFiles(Root, SkipList=None, FullPath=True):
# @retval False if file doesn't exists
#
def ValidFile(File, Ext=None):
- if Ext != None:
+ if Ext is not None:
Dummy, FileExt = os.path.splitext(File)
if FileExt.lower() != Ext.lower():
return False
@@ -715,13 +715,13 @@ def RealPath2(File, Dir='', OverrideDir=''):
#
def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
NewFile = File
- if Ext != None:
+ if Ext is not None:
Dummy, FileExt = os.path.splitext(File)
if FileExt.lower() != Ext.lower():
return False, File
# Replace the Edk macros
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
if OverrideDir.find('$(EFI_SOURCE)') > -1:
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
if OverrideDir.find('$(EDK_SOURCE)') > -1:
@@ -737,19 +737,19 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
NewFile = File.replace('$(EFI_SOURCE)', EfiSource)
NewFile = NewFile.replace('$(EDK_SOURCE)', EdkSource)
NewFile = AllFiles[os.path.normpath(NewFile)]
- if NewFile != None:
+ if NewFile is not None:
return True, NewFile
# Second check the path with override value
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
- if NewFile != None:
+ if NewFile is not None:
return True, NewFile
# Last check the path with normal definitions
File = os.path.join(Dir, File)
NewFile = AllFiles[os.path.normpath(File)]
- if NewFile != None:
+ if NewFile is not None:
return True, NewFile
return False, File
@@ -759,7 +759,7 @@ def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='
#
def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
# Replace the Edk macros
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
if OverrideDir.find('$(EFI_SOURCE)') > -1:
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
if OverrideDir.find('$(EDK_SOURCE)') > -1:
@@ -781,23 +781,23 @@ def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.'
File = File.replace('$(EFI_SOURCE)', EfiSource)
File = File.replace('$(EDK_SOURCE)', EdkSource)
NewFile = AllFiles[os.path.normpath(File)]
- if NewFile != None:
+ if NewFile is not None:
NewRelaPath = os.path.dirname(NewFile)
File = os.path.basename(NewFile)
#NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
break
# Second check the path with override value
- if OverrideDir != '' and OverrideDir != None:
+ if OverrideDir != '' and OverrideDir is not None:
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
- if NewFile != None:
+ if NewFile is not None:
#NewRelaPath = os.path.dirname(NewFile)
NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
break
# Last check the path with normal definitions
NewFile = AllFiles[os.path.normpath(os.path.join(Dir, File))]
- if NewFile != None:
+ if NewFile is not None:
break
# No file found
@@ -1062,7 +1062,7 @@ class Progressor:
self.CodaMessage = CloseMessage
self.ProgressChar = ProgressChar
self.Interval = Interval
- if Progressor._StopFlag == None:
+ if Progressor._StopFlag is None:
Progressor._StopFlag = threading.Event()
## Start to print progress characters
@@ -1070,10 +1070,10 @@ class Progressor:
# @param OpenMessage The string printed before progress characters
#
def Start(self, OpenMessage=None):
- if OpenMessage != None:
+ if OpenMessage is not None:
self.PromptMessage = OpenMessage
Progressor._StopFlag.clear()
- if Progressor._ProgressThread == None:
+ if Progressor._ProgressThread is None:
Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)
Progressor._ProgressThread.setDaemon(False)
Progressor._ProgressThread.start()
@@ -1084,7 +1084,7 @@ class Progressor:
#
def Stop(self, CloseMessage=None):
OriginalCodaMessage = self.CodaMessage
- if CloseMessage != None:
+ if CloseMessage is not None:
self.CodaMessage = CloseMessage
self.Abort()
self.CodaMessage = OriginalCodaMessage
@@ -1107,9 +1107,9 @@ class Progressor:
## Abort the progress display
@staticmethod
def Abort():
- if Progressor._StopFlag != None:
+ if Progressor._StopFlag is not None:
Progressor._StopFlag.set()
- if Progressor._ProgressThread != None:
+ if Progressor._ProgressThread is not None:
Progressor._ProgressThread.join()
Progressor._ProgressThread = None
@@ -1228,7 +1228,7 @@ class sdict(IterableUserDict):
return key, value
def update(self, dict=None, **kwargs):
- if dict != None:
+ if dict is not None:
for k, v in dict.items():
self[k] = v
if len(kwargs):
@@ -1301,7 +1301,7 @@ class tdict:
if self._Level_ > 1:
RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
- if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
+ if FirstKey is None or str(FirstKey).upper() in self._ValidWildcardList:
FirstKey = self._Wildcard
if self._Single_:
@@ -1316,24 +1316,24 @@ class tdict:
if FirstKey == self._Wildcard:
if FirstKey in self.data:
Value = self.data[FirstKey][RestKeys]
- if Value == None:
+ if Value is None:
for Key in self.data:
Value = self.data[Key][RestKeys]
- if Value != None: break
+ if Value is not None: break
else:
if FirstKey in self.data:
Value = self.data[FirstKey][RestKeys]
- if Value == None and self._Wildcard in self.data:
+ if Value is None and self._Wildcard in self.data:
#print "Value=None"
Value = self.data[self._Wildcard][RestKeys]
else:
if FirstKey == self._Wildcard:
if FirstKey in self.data:
Value = self.data[FirstKey]
- if Value == None:
+ if Value is None:
for Key in self.data:
Value = self.data[Key]
- if Value != None: break
+ if Value is not None: break
else:
if FirstKey in self.data:
Value = self.data[FirstKey]
@@ -2066,7 +2066,7 @@ class PathClass(object):
return hash(self.Path)
def _GetFileKey(self):
- if self._Key == None:
+ if self._Key is None:
self._Key = self.Path.upper() # + self.ToolChainFamily + self.TagName + self.ToolCode + self.Target
return self._Key
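
One subtlety worth keeping in mind while reading these hunks: "is None" matches only the None singleton; it is not a truthiness test. Empty strings, empty lists and 0 are falsy but are not None, which is why companion checks such as LibraryPath == "" and MaxDatumSize in ['', None] remain alongside the new identity tests. A short illustration in plain Python, independent of BaseTools:

# Empty values are falsy but they are not None, so "is None" and "not Value"
# answer different questions.
for Value in [None, '', 0, []]:
    print repr(Value), '| is None:', Value is None, '| falsy:', not Value

This is also why hunks of the form "if LibraryPath is None or LibraryPath == '':" keep both halves of the condition.
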
diff --git a/BaseTools/Source/Python/Common/Parsing.py b/BaseTools/Source/Python/Common/Parsing.py
index 584fc7f3c3a0..d199d1e40d8e 100644
--- a/BaseTools/Source/Python/Common/Parsing.py
+++ b/BaseTools/Source/Python/Common/Parsing.py
@@ -299,7 +299,7 @@ def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
Format = '<TokenSpaceGuidCName>.<PcdCName>'
- if TokenInfoString != '' and TokenInfoString != None:
+ if TokenInfoString != '' and TokenInfoString is not None:
TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)
if len(TokenInfoList) == 2:
return True
@@ -550,7 +550,7 @@ def GetComponents(Lines, Key, KeyValues, CommentCharacter):
LineList = Lines.split('\n')
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
- if Line == None or Line == '':
+ if Line is None or Line == '':
continue
if findBlock == False:
diff --git a/BaseTools/Source/Python/Common/String.py b/BaseTools/Source/Python/Common/String.py
index 696be4c1f0b2..5dc5b85dc5a4 100644
--- a/BaseTools/Source/Python/Common/String.py
+++ b/BaseTools/Source/Python/Common/String.py
@@ -634,7 +634,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
# @retval True The file type is correct
#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
(Root, Ext) = os.path.splitext(CheckFilename)
if Ext.upper() != ExtName.upper():
ContainerFile = open(ContainerFilename, 'r').read()
@@ -662,7 +662,7 @@ def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line,
#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
CheckFile = ''
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
if not os.path.isfile(CheckFile):
ContainerFile = open(ContainerFilename, 'r').read()
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
index 387e51523097..0ba7725dd5b5 100644
--- a/BaseTools/Source/Python/Common/TargetTxtClassObject.py
+++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
@@ -45,7 +45,7 @@ class TargetTxtClassObject(object):
DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
}
self.ConfDirectoryPath = ""
- if Filename != None:
+ if Filename is not None:
self.LoadTargetTxtFile(Filename)
## LoadTargetTxtFile
@@ -83,7 +83,7 @@ class TargetTxtClassObject(object):
self.ConfDirectoryPath = os.path.dirname(FileName)
except:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
- if F != None:
+ if F is not None:
F.close()
for Line in F:
@@ -144,7 +144,7 @@ class TargetTxtClassObject(object):
# @param Dict: The dictionary to be printed
#
def printDict(Dict):
- if Dict != None:
+ if Dict is not None:
KeyList = Dict.keys()
for Key in KeyList:
if Dict[Key] != '':
diff --git a/BaseTools/Source/Python/Common/ToolDefClassObject.py b/BaseTools/Source/Python/Common/ToolDefClassObject.py
index dc90b4783f2f..1ab848f1ec68 100644
--- a/BaseTools/Source/Python/Common/ToolDefClassObject.py
+++ b/BaseTools/Source/Python/Common/ToolDefClassObject.py
@@ -53,7 +53,7 @@ class ToolDefClassObject(object):
for Env in os.environ:
self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]
- if FileName != None:
+ if FileName is not None:
self.LoadToolDefFile(FileName)
## LoadToolDefFile
diff --git a/BaseTools/Source/Python/Common/VpdInfoFile.py b/BaseTools/Source/Python/Common/VpdInfoFile.py
index 716155e96d29..b1baf06b9ccd 100644
--- a/BaseTools/Source/Python/Common/VpdInfoFile.py
+++ b/BaseTools/Source/Python/Common/VpdInfoFile.py
@@ -89,7 +89,7 @@ class VpdInfoFile:
# @param offset integer value for VPD's offset in specific SKU.
#
def Add(self, Vpd, skuname,Offset):
- if (Vpd == None):
+ if (Vpd is None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= 0 or Offset == "*"):
@@ -100,7 +100,7 @@ class VpdInfoFile:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
- if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
+ if Vpd.MaxDatumSize is None or Vpd.MaxDatumSize == "":
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
else:
if Vpd.MaxDatumSize <= 0:
@@ -122,7 +122,7 @@ class VpdInfoFile:
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
- if not (FilePath != None or len(FilePath) != 0):
+ if not (FilePath is not None or len(FilePath) != 0):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
@@ -227,8 +227,8 @@ class VpdInfoFile:
# @param VpdFileName The string path name for VPD information guid.txt
#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
- assert ToolPath != None, "Invalid parameter ToolPath"
- assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
+ assert ToolPath is not None, "Invalid parameter ToolPath"
+ assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
OutputDir = os.path.dirname(VpdFileName)
FileName = os.path.basename(VpdFileName)
@@ -250,7 +250,7 @@ def CallExtenalBPDGTool(ToolPath, VpdFileName):
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
(out, error) = PopenObject.communicate()
print out
- while PopenObject.returncode == None :
+ while PopenObject.returncode is None :
PopenObject.wait()
if PopenObject.returncode != 0:
diff --git a/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
index e6c4495c95e7..6a8262e5e964 100644
--- a/BaseTools/Source/Python/CommonDataClass/CommonClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
@@ -44,11 +44,11 @@ def GenerateHelpText(Text, Lang):
class CommonClass(object):
def __init__(self, Usage = None, FeatureFlag = '', SupArchList = None, HelpText = ''):
self.Usage = Usage
- if self.Usage == None:
+ if self.Usage is None:
self.Usage = []
self.FeatureFlag = FeatureFlag
self.SupArchList = SupArchList
- if self.SupArchList == None:
+ if self.SupArchList is None:
self.SupArchList = []
self.HelpText = HelpText
self.HelpTextList = []
@@ -375,13 +375,13 @@ class PcdClass(CommonClass):
self.PcdCName = ''
self.Value = ''
self.Offset = ''
- if self.ValidUsage == None:
+ if self.ValidUsage is None:
self.ValidUsage = []
self.SkuInfoList = SkuInfoList
- if self.SkuInfoList == None:
+ if self.SkuInfoList is None:
self.SkuInfoList = {}
self.SupModuleList = SupModuleList
- if self.SupModuleList == None:
+ if self.SupModuleList is None:
self.SupModuleList = []
CommonClass.__init__(self)
self.PcdErrors = []
diff --git a/BaseTools/Source/Python/Ecc/CParser.py b/BaseTools/Source/Python/Ecc/CParser.py
index 41f2811430a0..94711a9a378a 100644
--- a/BaseTools/Source/Python/Ecc/CParser.py
+++ b/BaseTools/Source/Python/Ecc/CParser.py
@@ -783,14 +783,14 @@ class CParser(Parser):
if self.backtracking == 0:
- if d != None:
+ if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
else:
self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
self.function_definition_stack[-1].DeclLine = declarator1.start.line
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
- if a != None:
+ if a is not None:
self.function_definition_stack[-1].LBLine = a.start.line
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
else:
@@ -920,7 +920,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if b != None:
+ if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
@@ -957,7 +957,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if t != None:
+ if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
@@ -1401,7 +1401,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if s.stop != None:
+ if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
@@ -1416,7 +1416,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if e.stop != None:
+ if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py
index 5864758950ce..27783e617b92 100644
--- a/BaseTools/Source/Python/Ecc/Check.py
+++ b/BaseTools/Source/Python/Ecc/Check.py
@@ -1299,7 +1299,7 @@ class Check(object):
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
Name = Record[1].strip()
- if Name != '' and Name != None:
+ if Name != '' and Name is not None:
if Name[0] == '(':
Name = Name[1:Name.find(')')]
if Name.find('(') > -1:
diff --git a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
index 171600feebf9..fbe0c41b38b7 100644
--- a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
+++ b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py
@@ -301,7 +301,7 @@ class CodeFragmentCollector:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
if self.__CurrentChar() == T_CHAR_LF:
- if HashComment and PPDirectiveObj != None:
+ if HashComment and PPDirectiveObj is not None:
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
PPDirectiveObj.Content += T_CHAR_LF
PPExtend = True
@@ -423,7 +423,7 @@ class CodeFragmentCollector:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
if self.__CurrentChar() == T_CHAR_LF:
- if HashComment and PPDirectiveObj != None:
+ if HashComment and PPDirectiveObj is not None:
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
PPDirectiveObj.Content += T_CHAR_LF
PPExtend = True
diff --git a/BaseTools/Source/Python/Ecc/Ecc.py b/BaseTools/Source/Python/Ecc/Ecc.py
index 94f9a427e370..60dfc00260f1 100644
--- a/BaseTools/Source/Python/Ecc/Ecc.py
+++ b/BaseTools/Source/Python/Ecc/Ecc.py
@@ -178,7 +178,7 @@ class Ecc(object):
self.BuildMetaDataFileDatabase(SpeciDirs)
if self.ScanSourceCode:
EdkLogger.quiet("Building database for Meta Data File Done!")
- if SpeciDirs == None:
+ if SpeciDirs is None:
c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)
else:
for specificDir in SpeciDirs:
@@ -195,7 +195,7 @@ class Ecc(object):
#
def BuildMetaDataFileDatabase(self, SpecificDirs = None):
ScanFolders = []
- if SpecificDirs == None:
+ if SpecificDirs is None:
ScanFolders.append(EccGlobalData.gTarget)
else:
for specificDir in SpecificDirs:
@@ -346,15 +346,15 @@ class Ecc(object):
self.SetLogLevel(Options)
# Set other options
- if Options.ConfigFile != None:
+ if Options.ConfigFile is not None:
self.ConfigFile = Options.ConfigFile
- if Options.OutputFile != None:
+ if Options.OutputFile is not None:
self.OutputFile = Options.OutputFile
- if Options.ReportFile != None:
+ if Options.ReportFile is not None:
self.ReportFile = Options.ReportFile
- if Options.ExceptionFile != None:
+ if Options.ExceptionFile is not None:
self.ExceptionFile = Options.ExceptionFile
- if Options.Target != None:
+ if Options.Target is not None:
if not os.path.isdir(Options.Target):
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
else:
@@ -362,15 +362,15 @@ class Ecc(object):
else:
EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")
EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))
- if Options.keepdatabase != None:
+ if Options.keepdatabase is not None:
self.IsInit = False
- if Options.metadata != None and Options.sourcecode != None:
+ if Options.metadata is not None and Options.sourcecode is not None:
EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")
- if Options.metadata != None:
+ if Options.metadata is not None:
self.ScanSourceCode = False
- if Options.sourcecode != None:
+ if Options.sourcecode is not None:
self.ScanMetaData = False
- if Options.folders != None:
+ if Options.folders is not None:
self.OnlyScan = True
## SetLogLevel
@@ -380,11 +380,11 @@ class Ecc(object):
# @param Option: The option list including log level setting
#
def SetLogLevel(self, Option):
- if Option.verbose != None:
+ if Option.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.quiet != None:
+ elif Option.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
index a27e98c9752f..6b980150f53e 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
@@ -116,7 +116,7 @@ class Table(object):
SqlCommand = """select max(ID) from %s""" % self.Table
Record = self.Cur.execute(SqlCommand).fetchall()
Id = Record[0][0]
- if Id == None:
+ if Id is None:
Id = self.IdBase
return Id
@@ -191,7 +191,7 @@ class TableDataModel(Table):
def InitTable(self):
EdkLogger.verbose("\nInitialize table DataModel started ...")
Count = self.GetCount()
- if Count != None and Count != 0:
+ if Count is not None and Count != 0:
return
for Item in DataClass.MODEL_LIST:
CrossIndex = Item[1]
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
index ba478f9ecf10..34ab586084f1 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
@@ -228,7 +228,7 @@ class MetaFileParser(object):
self.Start()
# No specific ARCH or Platform given, use raw data
- if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
return self._RawTable.Query(*DataInfo)
# Do post-process if necessary
@@ -564,7 +564,7 @@ class InfParser(MetaFileParser):
self._ValueList = ['','','']
# parse current line, result will be put in self._ValueList
self._SectionParser[self._SectionType](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
continue
#
@@ -877,7 +877,7 @@ class DscParser(MetaFileParser):
self._ValueList = ['', '', '']
self._SectionParser[SectionType](self)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
#
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
@@ -1197,7 +1197,7 @@ class DscParser(MetaFileParser):
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex+1)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
NewOwner = self._IdMapping.get(Owner, -1)
@@ -1573,7 +1573,7 @@ class DecParser(MetaFileParser):
# section content
self._ValueList = ['','','']
self._SectionParser[self._SectionType[0]](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
self._Comments = []
continue
@@ -1932,7 +1932,7 @@ class Fdf(FdfObject):
#
# Load Fdf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
try:
self.LoadFdfFile(Filename)
except Exception:
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
index 54a3016948b1..9faa6b58b001 100644
--- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
+++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
@@ -117,9 +117,9 @@ class ModuleTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
- if Platform != None and Platform != 'COMMON':
+ if Platform is not None and Platform != 'COMMON':
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -198,7 +198,7 @@ class PackageTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -283,17 +283,17 @@ class PlatformTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
- if Scope1 != None and Scope1 != 'COMMON':
+ if Scope1 is not None and Scope1 != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
- if Scope2 != None and Scope2 != 'COMMON':
+ if Scope2 is not None and Scope2 != 'COMMON':
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
- if BelongsToItem != None:
+ if BelongsToItem is not None:
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
else:
ConditionString += " AND BelongsToItem<0"
- if FromItem != None:
+ if FromItem is not None:
ConditionString += " AND FromItem=%s" % FromItem
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
diff --git a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
index b93588eea61a..a86f19624c44 100644
--- a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
+++ b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py
@@ -30,14 +30,14 @@ from Common.LongFilePathSupport import OpenLongFilePath as open
def CreateXmlElement(Name, String, NodeList, AttributeList):
Doc = xml.dom.minidom.Document()
Element = Doc.createElement(Name)
- if String != '' and String != None:
+ if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if type(Item) == type([]):
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Node = Doc.createElement(Key)
Node.appendChild(Doc.createTextNode(Value))
Element.appendChild(Node)
@@ -46,7 +46,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
for Item in AttributeList:
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
@@ -62,7 +62,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
# @revel Nodes A list of XML nodes matching XPath style Sting.
#
def XmlList(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return []
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
@@ -98,7 +98,7 @@ def XmlList(Dom, String):
# @revel Node A single XML node matching XPath style Sting.
#
def XmlNode(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return ""
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py
index 35b7405e550d..a956294baa0f 100644
--- a/BaseTools/Source/Python/Ecc/c.py
+++ b/BaseTools/Source/Python/Ecc/c.py
@@ -550,7 +550,7 @@ def CollectSourceCodeDataIntoDB(RootDir):
Db.UpdateIdentifierBelongsToFunction()
def GetTableID(FullFileName, ErrorMsgList=None):
- if ErrorMsgList == None:
+ if ErrorMsgList is None:
ErrorMsgList = []
Db = GetDB()
@@ -575,7 +575,7 @@ def GetIncludeFileList(FullFileName):
if os.path.splitext(FullFileName)[1].upper() not in ('.H'):
return []
IFList = IncludeFileListDict.get(FullFileName)
- if IFList != None:
+ if IFList is not None:
return IFList
FileID = GetTableID(FullFileName)
@@ -601,12 +601,12 @@ def GetFullPathOfIncludeFile(Str, IncludePathList):
return None
def GetAllIncludeFiles(FullFileName):
- if AllIncludeFileListDict.get(FullFileName) != None:
+ if AllIncludeFileListDict.get(FullFileName) is not None:
return AllIncludeFileListDict.get(FullFileName)
FileDirName = os.path.dirname(FullFileName)
IncludePathList = IncludePathListDict.get(FileDirName)
- if IncludePathList == None:
+ if IncludePathList is None:
IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())
if FileDirName not in IncludePathList:
IncludePathList.insert(0, FileDirName)
@@ -618,7 +618,7 @@ def GetAllIncludeFiles(FullFileName):
FileName = FileName.strip('\"')
FileName = FileName.lstrip('<').rstrip('>').strip()
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
- if FullPath != None:
+ if FullPath is not None:
IncludeFileQueue.append(FullPath)
i = 0
@@ -629,7 +629,7 @@ def GetAllIncludeFiles(FullFileName):
FileName = FileName.strip('\"')
FileName = FileName.lstrip('<').rstrip('>').strip()
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
- if FullPath != None and FullPath not in IncludeFileQueue:
+ if FullPath is not None and FullPath not in IncludeFileQueue:
IncludeFileQueue.insert(i + 1, FullPath)
i += 1
@@ -853,7 +853,7 @@ def DiffModifier(Str1, Str2):
def GetTypedefDict(FullFileName):
Dict = ComplexTypeDict.get(FullFileName)
- if Dict != None:
+ if Dict is not None:
return Dict
FileID = GetTableID(FullFileName)
@@ -898,7 +898,7 @@ def GetTypedefDict(FullFileName):
def GetSUDict(FullFileName):
Dict = SUDict.get(FullFileName)
- if Dict != None:
+ if Dict is not None:
return Dict
FileID = GetTableID(FullFileName)
@@ -983,9 +983,9 @@ def StripComments(Str):
def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
Value = TypedefDict.get(Type)
- if Value == None:
+ if Value is None:
Value = SUDict.get(Type)
- if Value == None:
+ if Value is None:
return None
LBPos = Value.find('{')
@@ -994,11 +994,11 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
for FT in FTList:
if FT not in ('struct', 'union'):
Value = TypedefDict.get(FT)
- if Value == None:
+ if Value is None:
Value = SUDict.get(FT)
break
- if Value == None:
+ if Value is None:
return None
LBPos = Value.find('{')
@@ -1025,11 +1025,11 @@ def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
return None
def GetRealType(Type, TypedefDict, TargetType=None):
- if TargetType != None and Type == TargetType:
+ if TargetType is not None and Type == TargetType:
return Type
while TypedefDict.get(Type):
Type = TypedefDict.get(Type)
- if TargetType != None and Type == TargetType:
+ if TargetType is not None and Type == TargetType:
return Type
return Type
@@ -1043,10 +1043,10 @@ def GetTypeInfo(RefList, Modifier, FullFileName, TargetType=None):
while Index < len(RefList):
FieldName = RefList[Index]
FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)
- if FromType == None:
+ if FromType is None:
return None
# we want to determine the exact type.
- if TargetType != None:
+ if TargetType is not None:
Type = FromType.split()[0]
# we only want to check if it is a pointer
else:
@@ -1151,7 +1151,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
# Type = GetDataTypeFromModifier(Result[0]).split()[-1]
TypeList = GetDataTypeFromModifier(Result[0]).split()
Type = TypeList[-1]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1174,7 +1174,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
Type = TypeList[-1]
if Type == '*' and len(TypeList) >= 2:
Type = TypeList[-2]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1199,7 +1199,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
else:
TypeList = GetDataTypeFromModifier(Result[0]).split()
Type = TypeList[-1]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1230,7 +1230,7 @@ def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetTy
else:
TypeList = GetDataTypeFromModifier(Result[0]).split()
Type = TypeList[-1]
- if len(TypeList) > 1 and StarList != None:
+ if len(TypeList) > 1 and StarList is not None:
for Star in StarList:
Type = Type.strip()
Type = Type.rstrip(Star)
@@ -1933,12 +1933,12 @@ def CheckPointerNullComparison(FullFileName):
p = GetFuncDeclPattern()
for Str in PSL:
FuncRecord = GetFuncContainsPE(Str[1], FL)
- if FuncRecord == None:
+ if FuncRecord is None:
continue
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
PredInfo = SplitPredicateStr(Exp)
- if PredInfo[1] == None:
+ if PredInfo[1] is None:
PredVarStr = PredInfo[0][0].strip()
IsFuncCall = False
SearchInCache = False
@@ -1960,7 +1960,7 @@ def CheckPointerNullComparison(FullFileName):
continue
if SearchInCache:
Type = FuncReturnTypeDict.get(PredVarStr)
- if Type != None:
+ if Type is not None:
if Type.find('*') != -1 and Type != 'BOOLEAN*':
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
continue
@@ -1971,7 +1971,7 @@ def CheckPointerNullComparison(FullFileName):
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)
if SearchInCache:
FuncReturnTypeDict[PredVarStr] = Type
- if Type == None:
+ if Type is None:
continue
Type = GetTypeFromArray(Type, PredVarStr)
if Type.find('*') != -1 and Type != 'BOOLEAN*':
@@ -2012,12 +2012,12 @@ def CheckNonBooleanValueComparison(FullFileName):
p = GetFuncDeclPattern()
for Str in PSL:
FuncRecord = GetFuncContainsPE(Str[1], FL)
- if FuncRecord == None:
+ if FuncRecord is None:
continue
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
PredInfo = SplitPredicateStr(Exp)
- if PredInfo[1] == None:
+ if PredInfo[1] is None:
PredVarStr = PredInfo[0][0].strip()
IsFuncCall = False
SearchInCache = False
@@ -2040,7 +2040,7 @@ def CheckNonBooleanValueComparison(FullFileName):
if SearchInCache:
Type = FuncReturnTypeDict.get(PredVarStr)
- if Type != None:
+ if Type is not None:
if Type.find('BOOLEAN') == -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
continue
@@ -2050,7 +2050,7 @@ def CheckNonBooleanValueComparison(FullFileName):
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
if SearchInCache:
FuncReturnTypeDict[PredVarStr] = Type
- if Type == None:
+ if Type is None:
continue
if Type.find('BOOLEAN') == -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
@@ -2091,7 +2091,7 @@ def CheckBooleanValueComparison(FullFileName):
p = GetFuncDeclPattern()
for Str in PSL:
FuncRecord = GetFuncContainsPE(Str[1], FL)
- if FuncRecord == None:
+ if FuncRecord is None:
continue
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
@@ -2119,7 +2119,7 @@ def CheckBooleanValueComparison(FullFileName):
if SearchInCache:
Type = FuncReturnTypeDict.get(PredVarStr)
- if Type != None:
+ if Type is not None:
if Type.find('BOOLEAN') != -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
continue
@@ -2130,7 +2130,7 @@ def CheckBooleanValueComparison(FullFileName):
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
if SearchInCache:
FuncReturnTypeDict[PredVarStr] = Type
- if Type == None:
+ if Type is None:
continue
if Type.find('BOOLEAN') != -1:
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
diff --git a/BaseTools/Source/Python/Eot/CParser.py b/BaseTools/Source/Python/Eot/CParser.py
index 41f2811430a0..94711a9a378a 100644
--- a/BaseTools/Source/Python/Eot/CParser.py
+++ b/BaseTools/Source/Python/Eot/CParser.py
@@ -783,14 +783,14 @@ class CParser(Parser):
if self.backtracking == 0:
- if d != None:
+ if d is not None:
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
else:
self.function_definition_stack[-1].ModifierText = ''
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
self.function_definition_stack[-1].DeclLine = declarator1.start.line
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
- if a != None:
+ if a is not None:
self.function_definition_stack[-1].LBLine = a.start.line
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
else:
@@ -920,7 +920,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if b != None:
+ if b is not None:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
else:
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
@@ -957,7 +957,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if t != None:
+ if t is not None:
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
@@ -1401,7 +1401,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if s.stop != None:
+ if s.stop is not None:
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
@@ -1416,7 +1416,7 @@ class CParser(Parser):
return
if self.backtracking == 0:
- if e.stop != None:
+ if e.stop is not None:
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
diff --git a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
index bb78a0f882d5..b977a9d5322c 100644
--- a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
+++ b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
@@ -291,7 +291,7 @@ class CodeFragmentCollector:
InCharLiteral = not InCharLiteral
# meet new line, then no longer in a comment for // and '#'
if self.__CurrentChar() == T_CHAR_LF:
- if HashComment and PPDirectiveObj != None:
+ if HashComment and PPDirectiveObj is not None:
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
PPDirectiveObj.Content += T_CHAR_LF
PPExtend = True
diff --git a/BaseTools/Source/Python/Eot/Eot.py b/BaseTools/Source/Python/Eot/Eot.py
index 5029f7369d4a..c4164199acf3 100644
--- a/BaseTools/Source/Python/Eot/Eot.py
+++ b/BaseTools/Source/Python/Eot/Eot.py
@@ -579,11 +579,11 @@ class Eot(object):
# @param Option: The option list including log level setting
#
def SetLogLevel(self, Option):
- if Option.verbose != None:
+ if Option.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.quiet != None:
+ elif Option.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
diff --git a/BaseTools/Source/Python/Eot/FvImage.py b/BaseTools/Source/Python/Eot/FvImage.py
index 0f742c7d86c2..affca4e71e8a 100644
--- a/BaseTools/Source/Python/Eot/FvImage.py
+++ b/BaseTools/Source/Python/Eot/FvImage.py
@@ -52,7 +52,7 @@ class Image(array):
return array.__new__(cls, 'B')
def __init__(m, ID=None):
- if ID == None:
+ if ID is None:
m._ID_ = str(uuid.uuid1()).upper()
else:
m._ID_ = ID
@@ -208,7 +208,7 @@ class FirmwareVolume(Image):
return (CouldBeLoaded, DepexString, FileDepex)
def Dispatch(self, Db = None):
- if Db == None:
+ if Db is None:
return False
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
# Find PeiCore, DexCore, PeiPriori, DxePriori first
@@ -236,15 +236,15 @@ class FirmwareVolume(Image):
continue
# Parse SEC_CORE first
- if FfsSecCoreGuid != None:
+ if FfsSecCoreGuid is not None:
self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
self.LoadPpi(Db, FfsSecCoreGuid)
# Parse PEI first
- if FfsPeiCoreGuid != None:
+ if FfsPeiCoreGuid is not None:
self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
self.LoadPpi(Db, FfsPeiCoreGuid)
- if FfsPeiPrioriGuid != None:
+ if FfsPeiPrioriGuid is not None:
# Load PEIM described in priori file
FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
if len(FfsPeiPriori.Sections) == 1:
@@ -263,10 +263,10 @@ class FirmwareVolume(Image):
self.DisPatchPei(Db)
# Parse DXE then
- if FfsDxeCoreGuid != None:
+ if FfsDxeCoreGuid is not None:
self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
self.LoadProtocol(Db, FfsDxeCoreGuid)
- if FfsDxePrioriGuid != None:
+ if FfsDxePrioriGuid is not None:
# Load PEIM described in priori file
FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
if len(FfsDxePriori.Sections) == 1:
@@ -383,7 +383,7 @@ class FirmwareVolume(Image):
IsInstalled = True
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
NewFfs.Depex = DepexString
- if FileDepex != None:
+ if FileDepex is not None:
ScheduleList.insert.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
else:
ScheduleList[FfsID] = NewFfs
@@ -471,7 +471,7 @@ class FirmwareVolume(Image):
FfsId = repr(FfsObj)
if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
- if LastFfsObj != None:
+ if LastFfsObj is not None:
LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
else:
if FfsId in self.FfsDict:
@@ -480,7 +480,7 @@ class FirmwareVolume(Image):
% (FfsObj.Guid, FfsObj.Offset,
self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
self.FfsDict[FfsId] = FfsObj
- if LastFfsObj != None:
+ if LastFfsObj is not None:
LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)
FfsStartAddress += len(FfsObj)
@@ -527,11 +527,11 @@ class CompressedImage(Image):
def __init__(m, CompressedData=None, CompressionType=None, UncompressedLength=None):
Image.__init__(m)
- if UncompressedLength != None:
+ if UncompressedLength is not None:
m.UncompressedLength = UncompressedLength
- if CompressionType != None:
+ if CompressionType is not None:
m.CompressionType = CompressionType
- if CompressedData != None:
+ if CompressedData is not None:
m.Data = CompressedData
def __str__(m):
@@ -607,13 +607,13 @@ class GuidDefinedImage(Image):
def __init__(m, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):
Image.__init__(m)
- if SectionDefinitionGuid != None:
+ if SectionDefinitionGuid is not None:
m.SectionDefinitionGuid = SectionDefinitionGuid
- if DataOffset != None:
+ if DataOffset is not None:
m.DataOffset = DataOffset
- if Attributes != None:
+ if Attributes is not None:
m.Attributes = Attributes
- if Data != None:
+ if Data is not None:
m.Data = Data
def __str__(m):
@@ -791,7 +791,7 @@ class Depex(Image):
else:
CurrentData = m._OPCODE_
m._ExprList.append(Token)
- if CurrentData == None:
+ if CurrentData is None:
break
return m._ExprList
@@ -867,9 +867,9 @@ class Section(Image):
def __init__(m, Type=None, Size=None):
Image.__init__(m)
m._Alignment = 1
- if Type != None:
+ if Type is not None:
m.Type = Type
- if Size != None:
+ if Size is not None:
m.Size = Size
def __str__(m):
@@ -1283,7 +1283,7 @@ class LinkMap:
for Line in MapFile:
Line = Line.strip()
if not MappingStart:
- if MappingTitle.match(Line) != None:
+ if MappingTitle.match(Line) is not None:
MappingStart = True
continue
ResultList = MappingFormat.findall(Line)
diff --git a/BaseTools/Source/Python/Eot/InfParserLite.py b/BaseTools/Source/Python/Eot/InfParserLite.py
index 6bb2c5f9f1d6..8867bb5dc23a 100644
--- a/BaseTools/Source/Python/Eot/InfParserLite.py
+++ b/BaseTools/Source/Python/Eot/InfParserLite.py
@@ -52,7 +52,7 @@ class EdkInfParser(object):
self.SourceOverridePath = SourceOverridePath
# Load Inf file if filename is not None
- if Filename != None:
+ if Filename is not None:
self.LoadInfFile(Filename)
if SourceFileList:
diff --git a/BaseTools/Source/Python/Eot/Report.py b/BaseTools/Source/Python/Eot/Report.py
index 386e3eb8ec05..7435b4d7c930 100644
--- a/BaseTools/Source/Python/Eot/Report.py
+++ b/BaseTools/Source/Python/Eot/Report.py
@@ -234,7 +234,7 @@ class Report(object):
#
def GenerateFfs(self, FfsObj):
self.FfsIndex = self.FfsIndex + 1
- if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
+ if FfsObj is not None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
FfsGuid = FfsObj.Guid
FfsOffset = FfsObj._OFF_
FfsName = 'Unknown-Module'
diff --git a/BaseTools/Source/Python/GenFds/AprioriSection.py b/BaseTools/Source/Python/GenFds/AprioriSection.py
index 70e2e5a3baf2..92a74670ed25 100644
--- a/BaseTools/Source/Python/GenFds/AprioriSection.py
+++ b/BaseTools/Source/Python/GenFds/AprioriSection.py
@@ -75,11 +75,11 @@ class AprioriSection (AprioriSectionClassObject):
InfFileName = NormPath(FfsObj.InfFileName)
Arch = FfsObj.GetCurrentArch()
- if Arch != None:
+ if Arch is not None:
Dict['$(ARCH)'] = Arch
InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)
- if Arch != None:
+ if Arch is not None:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
Guid = Inf.Guid
diff --git a/BaseTools/Source/Python/GenFds/Capsule.py b/BaseTools/Source/Python/GenFds/Capsule.py
index e03d78995737..9332f016f7da 100644
--- a/BaseTools/Source/Python/GenFds/Capsule.py
+++ b/BaseTools/Source/Python/GenFds/Capsule.py
@@ -159,7 +159,7 @@ class Capsule (CapsuleClassObject) :
if not os.path.isabs(fmp.ImageFile):
CapInputFile = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, fmp.ImageFile)
CapOutputTmp = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.tmp'
- if ExternalTool == None:
+ if ExternalTool is None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % fmp.Certificate_Guid)
else:
CmdOption += ExternalTool
diff --git a/BaseTools/Source/Python/GenFds/CompressSection.py b/BaseTools/Source/Python/GenFds/CompressSection.py
index 56e71a35453b..08ab48669f45 100644
--- a/BaseTools/Source/Python/GenFds/CompressSection.py
+++ b/BaseTools/Source/Python/GenFds/CompressSection.py
@@ -55,7 +55,7 @@ class CompressSection (CompressSectionClassObject) :
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
- if FfsInf != None:
+ if FfsInf is not None:
self.CompType = FfsInf.__ExtendMacro__(self.CompType)
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
@@ -67,13 +67,13 @@ class CompressSection (CompressSectionClassObject) :
Index = Index + 1
SecIndex = '%s.%d' %(SecNum, Index)
ReturnSectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)
- if AlignValue != None:
- if MaxAlign == None:
+ if AlignValue is not None:
+ if MaxAlign is None:
MaxAlign = AlignValue
if GenFdsGlobalVariable.GetAlignment (AlignValue) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
MaxAlign = AlignValue
if ReturnSectList != []:
- if AlignValue == None:
+ if AlignValue is None:
AlignValue = "1"
for FileData in ReturnSectList:
SectFiles += (FileData,)
diff --git a/BaseTools/Source/Python/GenFds/DataSection.py b/BaseTools/Source/Python/GenFds/DataSection.py
index 2d2975f75c0f..40e345eee77e 100644
--- a/BaseTools/Source/Python/GenFds/DataSection.py
+++ b/BaseTools/Source/Python/GenFds/DataSection.py
@@ -52,7 +52,7 @@ class DataSection (DataSectionClassObject):
#
# Prepare the parameter of GenSection
#
- if FfsFile != None:
+ if FfsFile is not None:
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
else:
@@ -92,7 +92,7 @@ class DataSection (DataSectionClassObject):
NoStrip = True
if self.SecType in ('TE', 'PE32'):
- if self.KeepReloc != None:
+ if self.KeepReloc is not None:
NoStrip = self.KeepReloc
if not NoStrip:
diff --git a/BaseTools/Source/Python/GenFds/DepexSection.py b/BaseTools/Source/Python/GenFds/DepexSection.py
index 1992d2abd807..ef30a2f083c6 100644
--- a/BaseTools/Source/Python/GenFds/DepexSection.py
+++ b/BaseTools/Source/Python/GenFds/DepexSection.py
@@ -86,7 +86,7 @@ class DepexSection (DepexSectionClassObject):
for Exp in ExpList:
if Exp.upper() not in ('AND', 'OR', 'NOT', 'TRUE', 'FALSE', 'SOR', 'BEFORE', 'AFTER', 'END'):
GuidStr = self.__FindGuidValue(Exp)
- if GuidStr == None:
+ if GuidStr is None:
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,
"Depex GUID %s could not be found in build DB! (ModuleName: %s)" % (Exp, ModuleName))
diff --git a/BaseTools/Source/Python/GenFds/EfiSection.py b/BaseTools/Source/Python/GenFds/EfiSection.py
index 5029ec7a1823..7e6c88a0594e 100644
--- a/BaseTools/Source/Python/GenFds/EfiSection.py
+++ b/BaseTools/Source/Python/GenFds/EfiSection.py
@@ -55,10 +55,10 @@ class EfiSection (EfiSectionClassObject):
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False) :
- if self.FileName != None and self.FileName.startswith('PCD('):
+ if self.FileName is not None and self.FileName.startswith('PCD('):
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
"""Prepare the parameter of GenSection"""
- if FfsInf != None :
+ if FfsInf is not None :
InfFileName = FfsInf.InfFileName
SectionType = FfsInf.__ExtendMacro__(self.SectionType)
Filename = FfsInf.__ExtendMacro__(self.FileName)
@@ -66,20 +66,20 @@ class EfiSection (EfiSectionClassObject):
StringData = FfsInf.__ExtendMacro__(self.StringData)
NoStrip = True
if FfsInf.ModuleType in ('SEC', 'PEI_CORE', 'PEIM') and SectionType in ('TE', 'PE32'):
- if FfsInf.KeepReloc != None:
+ if FfsInf.KeepReloc is not None:
NoStrip = FfsInf.KeepReloc
- elif FfsInf.KeepRelocFromRule != None:
+ elif FfsInf.KeepRelocFromRule is not None:
NoStrip = FfsInf.KeepRelocFromRule
- elif self.KeepReloc != None:
+ elif self.KeepReloc is not None:
NoStrip = self.KeepReloc
- elif FfsInf.ShadowFromInfFile != None:
+ elif FfsInf.ShadowFromInfFile is not None:
NoStrip = FfsInf.ShadowFromInfFile
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s apply rule for None!" %ModuleName)
"""If the file name was pointed out, add it in FileList"""
FileList = []
- if Filename != None:
+ if Filename is not None:
Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
# check if the path is absolute or relative
if os.path.isabs(Filename):
@@ -107,14 +107,14 @@ class EfiSection (EfiSectionClassObject):
if SectionType == 'VERSION':
InfOverrideVerString = False
- if FfsInf.Version != None:
+ if FfsInf.Version is not None:
#StringData = FfsInf.Version
BuildNum = FfsInf.Version
InfOverrideVerString = True
if InfOverrideVerString:
#VerTuple = ('-n', '"' + StringData + '"')
- if BuildNum != None and BuildNum != '':
+ if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
else:
BuildNumTuple = tuple()
@@ -136,7 +136,7 @@ class EfiSection (EfiSectionClassObject):
VerString = f.read()
f.close()
BuildNum = VerString
- if BuildNum != None and BuildNum != '':
+ if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
#Ui=VerString,
@@ -146,7 +146,7 @@ class EfiSection (EfiSectionClassObject):
else:
BuildNum = StringData
- if BuildNum != None and BuildNum != '':
+ if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
else:
BuildNumTuple = tuple()
@@ -173,7 +173,7 @@ class EfiSection (EfiSectionClassObject):
elif SectionType == 'UI':
InfOverrideUiString = False
- if FfsInf.Ui != None:
+ if FfsInf.Ui is not None:
StringData = FfsInf.Ui
InfOverrideUiString = True
@@ -196,7 +196,7 @@ class EfiSection (EfiSectionClassObject):
Ui=UiString, IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
else:
- if StringData != None and len(StringData) > 0:
+ if StringData is not None and len(StringData) > 0:
UiTuple = ('-n', '"' + StringData + '"')
else:
UiTuple = tuple()
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index e35057931f03..51cc466ccccd 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -638,7 +638,7 @@ class FdfParser:
if not MacroVal:
if Macro in MacroDict:
MacroVal = MacroDict[Macro]
- if MacroVal != None:
+ if MacroVal is not None:
IncFileName = IncFileName.replace('$(' + Macro + ')', MacroVal, 1)
if MacroVal.find('$(') != -1:
PreIndex = StartPos
@@ -686,7 +686,7 @@ class FdfParser:
# list index of the insertion, note that line number is 'CurrentLine + 1'
InsertAtLine = CurrentLine
ParentProfile = GetParentAtLine (CurrentLine)
- if ParentProfile != None:
+ if ParentProfile is not None:
ParentProfile.IncludeFileList.insert(0, IncFileProfile)
IncFileProfile.Level = ParentProfile.Level + 1
IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
@@ -762,7 +762,7 @@ class FdfParser:
while StartPos != -1 and EndPos != -1 and self.__Token not in ['!ifdef', '!ifndef', '!if', '!elseif']:
MacroName = CurLine[StartPos+2 : EndPos]
MacorValue = self.__GetMacroValue(MacroName)
- if MacorValue != None:
+ if MacorValue is not None:
CurLine = CurLine.replace('$(' + MacroName + ')', MacorValue, 1)
if MacorValue.find('$(') != -1:
PreIndex = StartPos
@@ -1135,7 +1135,7 @@ class FdfParser:
if not self.__GetNextToken():
return False
- if RangeExpression.RegGuidPattern.match(self.__Token) != None:
+ if RangeExpression.RegGuidPattern.match(self.__Token) is not None:
return True
else:
self.__UndoToken()
@@ -1411,7 +1411,7 @@ class FdfParser:
#'\n\tGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \
# At this point, the closest parent would be the included file itself
Profile = GetParentAtLine(X.OriginalLineNumber)
- if Profile != None:
+ if Profile is not None:
X.Message += ' near line %d, column %d: %s' \
% (X.LineNumber, 0, Profile.FileLinesList[X.LineNumber-1])
else:
@@ -1539,7 +1539,7 @@ class FdfParser:
while self.__GetTokenStatements(FdObj):
pass
for Attr in ("BaseAddress", "Size", "ErasePolarity"):
- if getattr(FdObj, Attr) == None:
+ if getattr(FdObj, Attr) is None:
self.__GetNextToken()
raise Warning("Keyword %s missing" % Attr, self.FileName, self.CurrentLineNumber)
@@ -1694,7 +1694,7 @@ class FdfParser:
IsBlock = True
Item = Obj.BlockSizeList[-1]
- if Item[0] == None or Item[1] == None:
+ if Item[0] is None or Item[1] is None:
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
return IsBlock
@@ -1862,7 +1862,7 @@ class FdfParser:
#
def __GetRegionLayout(self, Fd):
Offset = self.__CalcRegionExpr()
- if Offset == None:
+ if Offset is None:
return False
RegionObj = Region.Region()
@@ -1873,7 +1873,7 @@ class FdfParser:
raise Warning("expected '|'", self.FileName, self.CurrentLineNumber)
Size = self.__CalcRegionExpr()
- if Size == None:
+ if Size is None:
raise Warning("expected Region Size", self.FileName, self.CurrentLineNumber)
RegionObj.Size = Size
@@ -2973,7 +2973,7 @@ class FdfParser:
FvImageSectionObj = FvImageSection.FvImageSection()
FvImageSectionObj.Alignment = AlignValue
- if FvObj != None:
+ if FvObj is not None:
FvImageSectionObj.Fv = FvObj
FvImageSectionObj.FvName = None
else:
@@ -3791,7 +3791,7 @@ class FdfParser:
Rule.CheckSum = CheckSum
Rule.Fixed = Fixed
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
while True:
@@ -3847,7 +3847,7 @@ class FdfParser:
Rule.CheckSum = CheckSum
Rule.Fixed = Fixed
Rule.KeyStringList = KeyStringList
- if KeepReloc != None:
+ if KeepReloc is not None:
Rule.KeepReloc = KeepReloc
Rule.FileExtension = Ext
Rule.FileName = self.__Token
@@ -3986,7 +3986,7 @@ class FdfParser:
EfiSectionObj.KeepReloc = False
else:
EfiSectionObj.KeepReloc = True
- if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
raise Warning("Section type %s has reloc strip flag conflict with Rule" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
else:
raise Warning("Section type %s could not have reloc strip flag" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
@@ -4313,7 +4313,7 @@ class FdfParser:
raise Warning("expected Component version", self.FileName, self.CurrentLineNumber)
Pattern = re.compile('-$|[0-9a-fA-F]{1,2}\.[0-9a-fA-F]{1,2}$', re.DOTALL)
- if Pattern.match(self.__Token) == None:
+ if Pattern.match(self.__Token) is None:
raise Warning("Unknown version format '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
CompStatementObj.CompVer = self.__Token
@@ -4577,7 +4577,7 @@ class FdfParser:
for elementRegionData in elementRegion.RegionDataList:
if elementRegionData.endswith(".cap"):
continue
- if elementRegionData != None and elementRegionData.upper() not in CapList:
+ if elementRegionData is not None and elementRegionData.upper() not in CapList:
CapList.append(elementRegionData.upper())
return CapList
@@ -4593,15 +4593,15 @@ class FdfParser:
def __GetReferencedFdCapTuple(self, CapObj, RefFdList = [], RefFvList = []):
for CapsuleDataObj in CapObj.CapsuleDataList :
- if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName != None and CapsuleDataObj.FvName.upper() not in RefFvList:
+ if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:
RefFvList.append (CapsuleDataObj.FvName.upper())
- elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName != None and CapsuleDataObj.FdName.upper() not in RefFdList:
+ elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:
RefFdList.append (CapsuleDataObj.FdName.upper())
- elif CapsuleDataObj.Ffs != None:
+ elif CapsuleDataObj.Ffs is not None:
if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):
- if CapsuleDataObj.Ffs.FvName != None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
+ if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
RefFvList.append(CapsuleDataObj.Ffs.FvName.upper())
- elif CapsuleDataObj.Ffs.FdName != None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
+ elif CapsuleDataObj.Ffs.FdName is not None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
RefFdList.append(CapsuleDataObj.Ffs.FdName.upper())
else:
self.__GetReferencedFdFvTupleFromSection(CapsuleDataObj.Ffs, RefFdList, RefFvList)
@@ -4624,7 +4624,7 @@ class FdfParser:
for elementRegionData in elementRegion.RegionDataList:
if elementRegionData.endswith(".fv"):
continue
- if elementRegionData != None and elementRegionData.upper() not in FvList:
+ if elementRegionData is not None and elementRegionData.upper() not in FvList:
FvList.append(elementRegionData.upper())
return FvList
@@ -4641,9 +4641,9 @@ class FdfParser:
for FfsObj in FvObj.FfsList:
if isinstance(FfsObj, FfsFileStatement.FileStatement):
- if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:
RefFvList.append(FfsObj.FvName.upper())
- elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:
RefFdList.append(FfsObj.FdName.upper())
else:
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
@@ -4664,9 +4664,9 @@ class FdfParser:
while SectionStack != []:
SectionObj = SectionStack.pop()
if isinstance(SectionObj, FvImageSection.FvImageSection):
- if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:
FvList.append(SectionObj.FvName.upper())
- if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:
FvList.append(SectionObj.Fv.UiFvName.upper())
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
index 12ec95b56501..3fd5a9c2158a 100644
--- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
@@ -59,7 +59,7 @@ class FileStatement (FileStatementClassObject) :
#
def GenFfs(self, Dict = {}, FvChildAddr=[], FvParentAddr=None, IsMakefile=False, FvName=None):
- if self.NameGuid != None and self.NameGuid.startswith('PCD('):
+ if self.NameGuid is not None and self.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
@@ -81,7 +81,7 @@ class FileStatement (FileStatementClassObject) :
Dict.update(self.DefineVarDict)
SectionAlignments = None
- if self.FvName != None :
+ if self.FvName is not None :
Buffer = StringIO.StringIO('')
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
@@ -89,14 +89,14 @@ class FileStatement (FileStatementClassObject) :
FileName = Fv.AddToBuffer(Buffer)
SectionFiles = [FileName]
- elif self.FdName != None:
+ elif self.FdName is not None:
if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))
Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
FileName = Fd.GenFd()
SectionFiles = [FileName]
- elif self.FileName != None:
+ elif self.FileName is not None:
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):
FileContent = ''
@@ -110,7 +110,7 @@ class FileStatement (FileStatementClassObject) :
Content = f.read()
f.close()
AlignValue = 1
- if self.SubAlignment[Index] != None:
+ if self.SubAlignment[Index] is not None:
AlignValue = GenFdsGlobalVariable.GetAlignment(self.SubAlignment[Index])
if AlignValue > MaxAlignValue:
MaxAlignIndex = Index
@@ -151,7 +151,7 @@ class FileStatement (FileStatementClassObject) :
section.FvAddr = FvChildAddr.pop(0)
elif isinstance(section, GuidSection):
section.FvAddr = FvChildAddr
- if FvParentAddr != None and isinstance(section, GuidSection):
+ if FvParentAddr is not None and isinstance(section, GuidSection):
section.FvParentAddr = FvParentAddr
if self.KeepReloc == False:
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
index a34823391171..0dbffffc9a15 100644
--- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -185,7 +185,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
InfLowerPath = str(PathClassObj).lower()
if self.OverrideGuid:
PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
- if self.CurrentArch != None:
+ if self.CurrentArch is not None:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
#
@@ -194,14 +194,14 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
self.ModuleType = Inf.ModuleType
- if Inf.Specification != None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
+ if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
if Inf.AutoGenVersion < 0x00010005:
self.ModuleType = Inf.ComponentType
self.VersionString = Inf.Version
self.BinFileList = Inf.Binaries
self.SourceFileList = Inf.Sources
- if self.KeepReloc == None and Inf.Shadow:
+ if self.KeepReloc is None and Inf.Shadow:
self.ShadowFromInfFile = Inf.Shadow
else:
@@ -209,7 +209,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
self.ModuleType = Inf.ModuleType
- if Inf.Specification != None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
+ if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
self.VersionString = Inf.Version
self.BinFileList = Inf.Binaries
@@ -231,7 +231,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if self.ModuleType == 'MM_CORE_STANDALONE' and int(self.PiSpecVersion, 16) < 0x00010032:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
- if Inf._Defs != None and len(Inf._Defs) > 0:
+ if Inf._Defs is not None and len(Inf._Defs) > 0:
self.OptRomDefs.update(Inf._Defs)
self.PatchPcds = []
@@ -476,7 +476,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
# Allow binary type module not specify override rule in FDF file.
#
if len(self.BinFileList) > 0:
- if self.Rule == None or self.Rule == "":
+ if self.Rule is None or self.Rule == "":
self.Rule = "BINARY"
if not IsMakefile and GenFdsGlobalVariable.EnableGenfdsMultiThread and self.Rule != 'BINARY':
@@ -545,7 +545,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
def __GetRule__ (self) :
CurrentArchList = []
- if self.CurrentArch == None:
+ if self.CurrentArch is None:
CurrentArchList = ['common']
else:
CurrentArchList.append(self.CurrentArch)
@@ -556,13 +556,13 @@ class FfsInfStatement(FfsInfStatementClassObject):
CurrentArch.upper() + \
'.' + \
self.ModuleType.upper()
- if self.Rule != None:
+ if self.Rule is not None:
RuleName = RuleName + \
'.' + \
self.Rule.upper()
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
- if Rule != None:
+ if Rule is not None:
GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
return Rule
@@ -572,7 +572,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
'.' + \
self.ModuleType.upper()
- if self.Rule != None:
+ if self.Rule is not None:
RuleName = RuleName + \
'.' + \
self.Rule.upper()
@@ -580,11 +580,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
GenFdsGlobalVariable.VerboseLogger ('Trying to apply common rule %s for INF %s' % (RuleName, self.InfFileName))
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
- if Rule != None:
+ if Rule is not None:
GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
return Rule
- if Rule == None :
+ if Rule is None :
EdkLogger.error("GenFds", GENFDS_ERROR, 'Don\'t Find common rule %s for INF %s' \
% (RuleName, self.InfFileName))
@@ -601,7 +601,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
DscArchList = []
for Arch in GenFdsGlobalVariable.ArchList :
PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
- if PlatformDataBase != None:
+ if PlatformDataBase is not None:
if InfFileKey in PlatformDataBase.Modules:
DscArchList.append (Arch)
else:
@@ -648,7 +648,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
ArchList = CurArchList
UseArchList = TargetArchList
- if self.UseArch != None:
+ if self.UseArch is not None:
UseArchList = []
UseArchList.append(self.UseArch)
ArchList = list(set (UseArchList) & set (ArchList))
@@ -689,7 +689,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if self.OverrideGuid:
FileName = self.OverrideGuid
Arch = "NoneArch"
- if self.CurrentArch != None:
+ if self.CurrentArch is not None:
Arch = self.CurrentArch
OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
@@ -723,7 +723,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
FileList = []
OutputFileList = []
GenSecInputFile = None
- if Rule.FileName != None:
+ if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
if os.path.isabs(GenSecInputFile):
GenSecInputFile = os.path.normpath(GenSecInputFile)
@@ -748,11 +748,11 @@ class FfsInfStatement(FfsInfStatementClassObject):
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
NoStrip = True
if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
- if self.KeepReloc != None:
+ if self.KeepReloc is not None:
NoStrip = self.KeepReloc
- elif Rule.KeepReloc != None:
+ elif Rule.KeepReloc is not None:
NoStrip = Rule.KeepReloc
- elif self.ShadowFromInfFile != None:
+ elif self.ShadowFromInfFile is not None:
NoStrip = self.ShadowFromInfFile
if FileList != [] :
@@ -868,7 +868,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
InputSection.append(InputFile)
SectionAlignments.append(Rule.SectAlignment)
- if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
+ if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
@@ -902,7 +902,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
def __GenComplexFileSection__(self, Rule, FvChildAddr, FvParentAddr, IsMakefile = False):
if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
- if Rule.KeepReloc != None:
+ if Rule.KeepReloc is not None:
self.KeepRelocFromRule = Rule.KeepReloc
SectFiles = []
SectAlignments = []
@@ -957,7 +957,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
Sect.FvAddr = FvChildAddr.pop(0)
elif isinstance(Sect, GuidSection):
Sect.FvAddr = FvChildAddr
- if FvParentAddr != None and isinstance(Sect, GuidSection):
+ if FvParentAddr is not None and isinstance(Sect, GuidSection):
Sect.FvParentAddr = FvParentAddr
if Rule.KeyStringList != []:
@@ -1040,7 +1040,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
def __GenComplexFileFfs__(self, Rule, InputFile, Alignments, MakefilePath = None):
- if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
+ if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
@@ -1079,7 +1079,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if Rule.CheckSum != False:
result += ('-s',)
- if Rule.Alignment != None and Rule.Alignment != '':
+ if Rule.Alignment is not None and Rule.Alignment != '':
result += ('-a', Rule.Alignment)
return result
diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py
index c0b869d250f1..14e36b885966 100644
--- a/BaseTools/Source/Python/GenFds/Fv.py
+++ b/BaseTools/Source/Python/GenFds/Fv.py
@@ -70,14 +70,14 @@ class FV (FvClassObject):
#
def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', VtfDict=None, MacroDict = {}, Flag=False) :
- if BaseAddress == None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
+ if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']
#
# Check whether FV in Capsule is in FD flash region.
# If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
#
- if self.CapsuleName != None:
+ if self.CapsuleName is not None:
for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
for RegionObj in FdObj.RegionList:
@@ -94,7 +94,7 @@ class FV (FvClassObject):
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
FFSGuid = None
- if self.FvBaseAddress != None:
+ if self.FvBaseAddress is not None:
BaseAddress = self.FvBaseAddress
if not Flag:
self.__InitializeInf__(BaseAddress, BlockSize, BlockNum, ErasePloarity, VtfDict)
@@ -136,7 +136,7 @@ class FV (FvClassObject):
FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
FvOutputFile = FvOutputFile + '.Fv'
# BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
- if self.CreateFileName != None:
+ if self.CreateFileName is not None:
FvOutputFile = self.CreateFileName
if Flag:
@@ -163,7 +163,7 @@ class FV (FvClassObject):
NewFvInfo = None
if os.path.exists (FvInfoFileName):
NewFvInfo = open(FvInfoFileName, 'r').read()
- if NewFvInfo != None and NewFvInfo != OrigFvInfo:
+ if NewFvInfo is not None and NewFvInfo != OrigFvInfo:
FvChildAddr = []
AddFileObj = open(FvInfoFileName, 'r')
AddrStrings = AddFileObj.readlines()
@@ -273,16 +273,16 @@ class FV (FvClassObject):
# Add [Options]
#
self.FvInfFile.writelines("[options]" + T_CHAR_LF)
- if BaseAddress != None :
+ if BaseAddress is not None :
self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \
BaseAddress + \
T_CHAR_LF)
- if BlockSize != None:
+ if BlockSize is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize + \
T_CHAR_LF)
- if BlockNum != None:
+ if BlockNum is not None:
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockNum + \
T_CHAR_LF)
@@ -293,20 +293,20 @@ class FV (FvClassObject):
self.FvInfFile.writelines("EFI_BLOCK_SIZE = 0x1" + T_CHAR_LF)
for BlockSize in self.BlockSizeList :
- if BlockSize[0] != None:
+ if BlockSize[0] is not None:
self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize[0] + \
T_CHAR_LF)
- if BlockSize[1] != None:
+ if BlockSize[1] is not None:
self.FvInfFile.writelines("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockSize[1] + \
T_CHAR_LF)
- if self.BsBaseAddress != None:
+ if self.BsBaseAddress is not None:
self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.BsBaseAddress)
- if self.RtBaseAddress != None:
+ if self.RtBaseAddress is not None:
self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.RtBaseAddress)
#
@@ -317,7 +317,7 @@ class FV (FvClassObject):
self.FvInfFile.writelines("EFI_ERASE_POLARITY = " + \
' %s' %ErasePloarity + \
T_CHAR_LF)
- if not (self.FvAttributeDict == None):
+ if not (self.FvAttributeDict is None):
for FvAttribute in self.FvAttributeDict.keys() :
if FvAttribute == "FvUsedSizeEnable":
if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1') :
@@ -328,7 +328,7 @@ class FV (FvClassObject):
' = ' + \
self.FvAttributeDict[FvAttribute] + \
T_CHAR_LF )
- if self.FvAlignment != None:
+ if self.FvAlignment is not None:
self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_" + \
self.FvAlignment.strip() + \
" = TRUE" + \
@@ -337,7 +337,7 @@ class FV (FvClassObject):
#
# Generate FV extension header file
#
- if self.FvNameGuid == None or self.FvNameGuid == '':
+ if self.FvNameGuid is None or self.FvNameGuid == '':
if len(self.FvExtEntryType) > 0 or self.UsedSizeEnable:
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
@@ -442,7 +442,7 @@ class FV (FvClassObject):
# Add [Files]
#
self.FvInfFile.writelines("[files]" + T_CHAR_LF)
- if VtfDict != None and self.UiFvName in VtfDict.keys():
+ if VtfDict is not None and self.UiFvName in VtfDict.keys():
self.FvInfFile.writelines("EFI_FILE_NAME = " + \
VtfDict.get(self.UiFvName) + \
T_CHAR_LF)
diff --git a/BaseTools/Source/Python/GenFds/FvImageSection.py b/BaseTools/Source/Python/GenFds/FvImageSection.py
index 916ff919176c..5026a3ffca2f 100644
--- a/BaseTools/Source/Python/GenFds/FvImageSection.py
+++ b/BaseTools/Source/Python/GenFds/FvImageSection.py
@@ -53,7 +53,7 @@ class FvImageSection(FvImageSectionClassObject):
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):
OutputFileList = []
- if self.FvFileType != None:
+ if self.FvFileType is not None:
FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FvFileType, self.FvFileExtension)
if IsSect :
return FileList, self.Alignment
@@ -96,20 +96,20 @@ class FvImageSection(FvImageSectionClassObject):
#
# Generate Fv
#
- if self.FvName != None:
+ if self.FvName is not None:
Buffer = StringIO.StringIO('')
Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
- if Fv != None:
+ if Fv is not None:
self.Fv = Fv
FvFileName = Fv.AddToBuffer(Buffer, self.FvAddr, MacroDict = Dict, Flag=IsMakefile)
- if Fv.FvAlignment != None:
- if self.Alignment == None:
+ if Fv.FvAlignment is not None:
+ if self.Alignment is None:
self.Alignment = Fv.FvAlignment
else:
if GenFdsGlobalVariable.GetAlignment (Fv.FvAlignment) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
self.Alignment = Fv.FvAlignment
else:
- if self.FvFileName != None:
+ if self.FvFileName is not None:
FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
if os.path.isfile(FvFileName):
FvFileObj = open (FvFileName,'rb')
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py
index 03126e35f47a..515cfd06ccb0 100644
--- a/BaseTools/Source/Python/GenFds/GenFds.py
+++ b/BaseTools/Source/Python/GenFds/GenFds.py
@@ -69,22 +69,22 @@ def main():
EdkLogger.Initialize()
try:
- if Options.verbose != None:
+ if Options.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
GenFdsGlobalVariable.VerboseMode = True
- if Options.FixedAddress != None:
+ if Options.FixedAddress is not None:
GenFdsGlobalVariable.FixedLoadAddress = True
- if Options.quiet != None:
+ if Options.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- if Options.debug != None:
+ if Options.debug is not None:
EdkLogger.SetLevel(Options.debug + 1)
GenFdsGlobalVariable.DebugLevel = Options.debug
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if (Options.Workspace == None):
+ if (Options.Workspace is None):
EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined",
ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
elif not os.path.exists(Options.Workspace):
@@ -179,7 +179,7 @@ def main():
# if no tool chain given in command line, get it from target.txt
if not GenFdsGlobalVariable.ToolChainTag:
ToolChainList = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
- if ToolChainList == None or len(ToolChainList) == 0:
+ if ToolChainList is None or len(ToolChainList) == 0:
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.")
if len(ToolChainList) != 1:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for ToolChain.")
@@ -300,7 +300,7 @@ def main():
"No such a Capsule in FDF file: %s" % Options.uiCapName)
GenFdsGlobalVariable.WorkSpace = BuildWorkSpace
- if ArchList != None:
+ if ArchList is not None:
GenFdsGlobalVariable.ArchList = ArchList
# Dsc Build Data will handle Pcd Settings from CommandLine.
@@ -340,7 +340,7 @@ def main():
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID
except FatalError, X:
- if Options.debug != None:
+ if Options.debug is not None:
import traceback
EdkLogger.quiet(traceback.format_exc())
ReturnCode = X.args[0]
@@ -378,7 +378,7 @@ def SingleCheckCallback(option, opt_str, value, parser):
def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.ConfDir).ToolsDefTxtDatabase
# if user not specify filter, try to deduce it from global data.
- if KeyStringList == None or KeyStringList == []:
+ if KeyStringList is None or KeyStringList == []:
Target = GenFdsGlobalVariable.TargetName
ToolChain = GenFdsGlobalVariable.ToolChainTag
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
@@ -411,7 +411,7 @@ def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
ToolOptionKey = Key + '_' + KeyList[3] + '_FLAGS'
ToolPath = ToolDefinition.get(ToolPathKey)
ToolOption = ToolDefinition.get(ToolOptionKey)
- if ToolPathTmp == None:
+ if ToolPathTmp is None:
ToolPathTmp = ToolPath
else:
if ToolPathTmp != ToolPath:
@@ -523,38 +523,38 @@ class GenFds :
GenFdsGlobalVariable.SetDir ('', FdfParser, WorkSpace, ArchList)
GenFdsGlobalVariable.VerboseLogger(" Generate all Fd images and their required FV and Capsule images!")
- if GenFds.OnlyGenerateThisCap != None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
+ if GenFds.OnlyGenerateThisCap is not None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.get(GenFds.OnlyGenerateThisCap.upper())
- if CapsuleObj != None:
+ if CapsuleObj is not None:
CapsuleObj.GenCapsule()
return
- if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
+ if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(GenFds.OnlyGenerateThisFd.upper())
- if FdObj != None:
+ if FdObj is not None:
FdObj.GenFd()
return
- elif GenFds.OnlyGenerateThisFd == None and GenFds.OnlyGenerateThisFv == None:
+ elif GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisFv is None:
for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
FdObj.GenFd()
GenFdsGlobalVariable.VerboseLogger("\n Generate other FV images! ")
- if GenFds.OnlyGenerateThisFv != None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
+ if GenFds.OnlyGenerateThisFv is not None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(GenFds.OnlyGenerateThisFv.upper())
- if FvObj != None:
+ if FvObj is not None:
Buffer = StringIO.StringIO()
FvObj.AddToBuffer(Buffer)
Buffer.close()
return
- elif GenFds.OnlyGenerateThisFv == None:
+ elif GenFds.OnlyGenerateThisFv is None:
for FvName in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
Buffer = StringIO.StringIO('')
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[FvName]
FvObj.AddToBuffer(Buffer)
Buffer.close()
- if GenFds.OnlyGenerateThisFv == None and GenFds.OnlyGenerateThisFd == None and GenFds.OnlyGenerateThisCap == None:
+ if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
for CapsuleName in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
@@ -592,14 +592,14 @@ class GenFds :
def GetFvBlockSize(FvObj):
DefaultBlockSize = 0x1
FdObj = None
- if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
+ if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
- if FdObj == None:
+ if FdObj is None:
for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
for ElementRegion in ElementFd.RegionList:
if ElementRegion.RegionType == 'FV':
for ElementRegionData in ElementRegion.RegionDataList:
- if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName:
+ if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
else:
@@ -611,7 +611,7 @@ class GenFds :
for ElementRegion in FdObj.RegionList:
if ElementRegion.RegionType == 'FV':
for ElementRegionData in ElementRegion.RegionDataList:
- if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName:
+ if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
else:
diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
index 97e20753ae9b..fcb191981c95 100644
--- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
+++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
@@ -229,7 +229,7 @@ class GenFdsGlobalVariable:
Source = SourceList[Index]
Index = Index + 1
- if File.IsBinary and File == Source and Inf.Binaries != None and File in Inf.Binaries:
+ if File.IsBinary and File == Source and Inf.Binaries is not None and File in Inf.Binaries:
# Skip all files that are not binary libraries
if not Inf.LibraryClass:
continue
@@ -420,7 +420,7 @@ class GenFdsGlobalVariable:
if not os.path.exists(Output):
return True
# always update "Output" if no "Input" given
- if Input == None or len(Input) == 0:
+ if Input is None or len(Input) == 0:
return True
# if fdf file is changed after the 'Output" is generated, update the 'Output'
@@ -445,9 +445,9 @@ class GenFdsGlobalVariable:
Cmd += ["-s", Type]
if CompressionType not in [None, '']:
Cmd += ["-c", CompressionType]
- if Guid != None:
+ if Guid is not None:
Cmd += ["-g", Guid]
- if DummyFile != None:
+ if DummyFile is not None:
Cmd += ["--dummy", DummyFile]
if GuidHdrLen not in [None, '']:
Cmd += ["-l", GuidHdrLen]
@@ -455,7 +455,7 @@ class GenFdsGlobalVariable:
#Add each guided attribute
for Attr in GuidAttr:
Cmd += ["-r", Attr]
- if InputAlign != None:
+ if InputAlign is not None:
#Section Align is only for dummy section without section type
for SecAlign in InputAlign:
Cmd += ["--sectionalign", SecAlign]
@@ -509,7 +509,7 @@ class GenFdsGlobalVariable:
@staticmethod
def GetAlignment (AlignString):
- if AlignString == None:
+ if AlignString is None:
return 0
if AlignString in ("1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K"):
return int (AlignString.rstrip('K')) * 1024
@@ -669,13 +669,13 @@ class GenFdsGlobalVariable:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
- if ClassCode != None:
+ if ClassCode is not None:
Cmd += ["-l", ClassCode]
- if Revision != None:
+ if Revision is not None:
Cmd += ["-r", Revision]
- if DeviceId != None:
+ if DeviceId is not None:
Cmd += ["-i", DeviceId]
- if VendorId != None:
+ if VendorId is not None:
Cmd += ["-f", VendorId]
Cmd += ["-o", Output]
@@ -726,7 +726,7 @@ class GenFdsGlobalVariable:
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
(out, error) = PopenObject.communicate()
- while PopenObject.returncode == None :
+ while PopenObject.returncode is None :
PopenObject.wait()
if returnValue != [] and returnValue[0] != 0:
#get command return value
@@ -758,7 +758,7 @@ class GenFdsGlobalVariable:
# @param MacroDict Dictionary that contains macro value pair
#
def MacroExtend (Str, MacroDict={}, Arch='COMMON'):
- if Str == None :
+ if Str is None :
return None
Dict = {'$(WORKSPACE)' : GenFdsGlobalVariable.WorkSpaceDir,
@@ -774,7 +774,7 @@ class GenFdsGlobalVariable:
Dict['$(OUTPUT_DIRECTORY)'] = OutputDir
- if MacroDict != None and len (MacroDict) != 0:
+ if MacroDict is not None and len (MacroDict) != 0:
Dict.update(MacroDict)
for key in Dict.keys():
@@ -794,7 +794,7 @@ class GenFdsGlobalVariable:
# @param PcdPattern pattern that labels a PCD.
#
def GetPcdValue (PcdPattern):
- if PcdPattern == None :
+ if PcdPattern is None :
return None
PcdPair = PcdPattern.lstrip('PCD(').rstrip(')').strip().split('.')
TokenSpace = PcdPair[0]
diff --git a/BaseTools/Source/Python/GenFds/GuidSection.py b/BaseTools/Source/Python/GenFds/GuidSection.py
index ea737bb9a7ea..8362073f97a3 100644
--- a/BaseTools/Source/Python/GenFds/GuidSection.py
+++ b/BaseTools/Source/Python/GenFds/GuidSection.py
@@ -60,7 +60,7 @@ class GuidSection(GuidSectionClassObject) :
#
self.KeyStringList = KeyStringList
self.CurrentArchList = GenFdsGlobalVariable.ArchList
- if FfsInf != None:
+ if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
@@ -79,7 +79,7 @@ class GuidSection(GuidSectionClassObject) :
if self.FvAddr != []:
#no use FvAddr when the image is processed.
self.FvAddr = []
- if self.FvParentAddr != None:
+ if self.FvParentAddr is not None:
#no use Parent Addr when the image is processed.
self.FvParentAddr = None
@@ -99,20 +99,20 @@ class GuidSection(GuidSectionClassObject) :
if Sect.IncludeFvSection:
self.IncludeFvSection = Sect.IncludeFvSection
- if align != None:
- if MaxAlign == None:
+ if align is not None:
+ if MaxAlign is None:
MaxAlign = align
if GenFdsGlobalVariable.GetAlignment (align) > GenFdsGlobalVariable.GetAlignment (MaxAlign):
MaxAlign = align
if ReturnSectList != []:
- if align == None:
+ if align is None:
align = "1"
for file in ReturnSectList:
SectFile += (file,)
SectAlign.append(align)
- if MaxAlign != None:
- if self.Alignment == None:
+ if MaxAlign is not None:
+ if self.Alignment is None:
self.Alignment = MaxAlign
else:
if GenFdsGlobalVariable.GetAlignment (MaxAlign) > GenFdsGlobalVariable.GetAlignment (self.Alignment):
@@ -128,21 +128,21 @@ class GuidSection(GuidSectionClassObject) :
ExternalTool = None
ExternalOption = None
- if self.NameGuid != None:
+ if self.NameGuid is not None:
ExternalTool, ExternalOption = FindExtendTool(self.KeyStringList, self.CurrentArchList, self.NameGuid)
#
# If not have GUID , call default
# GENCRC32 section
#
- if self.NameGuid == None :
+ if self.NameGuid is None :
GenFdsGlobalVariable.VerboseLogger("Use GenSection function Generate CRC32 Section")
GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType], InputAlign=SectAlign, IsMakefile=IsMakefile)
OutputFileList = []
OutputFileList.append(OutputFile)
return OutputFileList, self.Alignment
#or GUID not in External Tool List
- elif ExternalTool == None:
+ elif ExternalTool is None:
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
else:
DummyFile = OutputFile + ".dummy"
@@ -170,10 +170,10 @@ class GuidSection(GuidSectionClassObject) :
FirstCall = False
CmdOption = '-e'
- if ExternalOption != None:
+ if ExternalOption is not None:
CmdOption = CmdOption + ' ' + ExternalOption
if not GenFdsGlobalVariable.EnableGenfdsMultiThread:
- if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr != None:
+ if self.ProcessRequired not in ("TRUE", "1") and self.IncludeFvSection and not FvAddrIsSet and self.FvParentAddr is not None:
#FirstCall is only set for the encapsulated flash FV image without process required attribute.
FirstCall = True
#
@@ -213,7 +213,7 @@ class GuidSection(GuidSectionClassObject) :
if self.ExtraHeaderSize != -1:
HeaderLength = str(self.ExtraHeaderSize)
- if self.ProcessRequired == "NONE" and HeaderLength == None:
+ if self.ProcessRequired == "NONE" and HeaderLength is None:
if TempFileSize > InputFileSize:
FileHandleIn.seek(0)
BufferIn = FileHandleIn.read()
@@ -222,7 +222,7 @@ class GuidSection(GuidSectionClassObject) :
if BufferIn == BufferOut[TempFileSize - InputFileSize:]:
HeaderLength = str(TempFileSize - InputFileSize)
#auto sec guided attribute with process required
- if HeaderLength == None:
+ if HeaderLength is None:
Attribute.append('PROCESSING_REQUIRED')
FileHandleIn.close()
@@ -253,7 +253,7 @@ class GuidSection(GuidSectionClassObject) :
HeaderLength = str(self.ExtraHeaderSize)
if self.AuthStatusValid in ("TRUE", "1"):
Attribute.append('AUTH_STATUS_VALID')
- if self.ProcessRequired == "NONE" and HeaderLength == None:
+ if self.ProcessRequired == "NONE" and HeaderLength is None:
GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
Guid=self.NameGuid, GuidAttr=Attribute,
GuidHdrLen=HeaderLength, DummyFile=DummyFile, IsMakefile=IsMakefile)
diff --git a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
index ab4fae611e33..4ef9b4d0e9a8 100644
--- a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py
@@ -41,7 +41,7 @@ class OptRomFileStatement:
#
def GenFfs(self, Dict = {}, IsMakefile=False):
- if self.FileName != None:
+ if self.FileName is not None:
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
return self.FileName
diff --git a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
index 80c4bbab6eff..62d731fb9cca 100644
--- a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py
@@ -46,10 +46,10 @@ class OptRomInfStatement (FfsInfStatement):
#
def __GetOptRomParams(self):
- if self.OverrideAttribs == None:
+ if self.OverrideAttribs is None:
self.OverrideAttribs = OptionRom.OverrideAttribs()
- if self.OverrideAttribs.NeedCompress == None:
+ if self.OverrideAttribs.NeedCompress is None:
self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
if self.OverrideAttribs.NeedCompress is not None:
if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
@@ -57,16 +57,16 @@ class OptRomInfStatement (FfsInfStatement):
self.OverrideAttribs.NeedCompress = \
self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
- if self.OverrideAttribs.PciVendorId == None:
+ if self.OverrideAttribs.PciVendorId is None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
- if self.OverrideAttribs.PciClassCode == None:
+ if self.OverrideAttribs.PciClassCode is None:
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
- if self.OverrideAttribs.PciDeviceId == None:
+ if self.OverrideAttribs.PciDeviceId is None:
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
- if self.OverrideAttribs.PciRevision == None:
+ if self.OverrideAttribs.PciRevision is None:
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
@@ -121,7 +121,7 @@ class OptRomInfStatement (FfsInfStatement):
#
OutputFileList = []
- if Rule.FileName != None:
+ if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
OutputFileList.append(GenSecInputFile)
else:
@@ -143,7 +143,7 @@ class OptRomInfStatement (FfsInfStatement):
OutputFileList = []
for Sect in Rule.SectionList:
if Sect.SectionType == 'PE32':
- if Sect.FileName != None:
+ if Sect.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
OutputFileList.append(GenSecInputFile)
else:
diff --git a/BaseTools/Source/Python/GenFds/OptionRom.py b/BaseTools/Source/Python/GenFds/OptionRom.py
index 2e61a38c1d33..b05841529940 100644
--- a/BaseTools/Source/Python/GenFds/OptionRom.py
+++ b/BaseTools/Source/Python/GenFds/OptionRom.py
@@ -63,7 +63,7 @@ class OPTIONROM (OptionRomClassObject):
FilePathNameList = FfsFile.GenFfs(IsMakefile=Flag)
if len(FilePathNameList) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s not produce .efi files, so NO file could be put into option ROM." % (FfsFile.InfFileName))
- if FfsFile.OverrideAttribs == None:
+ if FfsFile.OverrideAttribs is None:
EfiFileList.extend(FilePathNameList)
else:
FileName = os.path.basename(FilePathNameList[0])
@@ -84,7 +84,7 @@ class OPTIONROM (OptionRomClassObject):
BinFileList.append(TmpOutputFile)
else:
FilePathName = FfsFile.GenFfs(IsMakefile=Flag)
- if FfsFile.OverrideAttribs != None:
+ if FfsFile.OverrideAttribs is not None:
FileName = os.path.basename(FilePathName)
TmpOutputDir = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName, FfsFile.CurrentArch)
if not os.path.exists(TmpOutputDir) :
diff --git a/BaseTools/Source/Python/GenFds/Region.py b/BaseTools/Source/Python/GenFds/Region.py
index c946758cf549..e639739b7e03 100644
--- a/BaseTools/Source/Python/GenFds/Region.py
+++ b/BaseTools/Source/Python/GenFds/Region.py
@@ -114,7 +114,7 @@ class Region(RegionClassObject):
if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
- if FvObj != None :
+ if FvObj is not None :
if not Flag:
GenFdsGlobalVariable.InfLogger(' Region Name = FV')
#
@@ -152,7 +152,7 @@ class Region(RegionClassObject):
# Add the exist Fv image into FD buffer
#
if not Flag:
- if FileName != None:
+ if FileName is not None:
FileLength = os.stat(FileName)[ST_SIZE]
if FileLength > Size:
EdkLogger.error("GenFds", GENFDS_ERROR,
@@ -193,7 +193,7 @@ class Region(RegionClassObject):
if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
- if CapsuleObj != None :
+ if CapsuleObj is not None :
CapsuleObj.CapsuleName = RegionData.upper()
GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
#
@@ -270,7 +270,7 @@ class Region(RegionClassObject):
#
self.PadBuffer(Buffer, ErasePolarity, Size)
- if self.RegionType == None:
+ if self.RegionType is None:
GenFdsGlobalVariable.InfLogger(' Region Name = None')
self.PadBuffer(Buffer, ErasePolarity, Size)
@@ -333,7 +333,7 @@ class Region(RegionClassObject):
# first check whether FvObj.BlockSizeList items have only "BlockSize" or "NumBlocks",
# if so, use ExpectedList
for Item in FvObj.BlockSizeList:
- if Item[0] == None or Item[1] == None:
+ if Item[0] is None or Item[1] is None:
FvObj.BlockSizeList = ExpectedList
break
# make sure region size is no smaller than the summed block size in FV
diff --git a/BaseTools/Source/Python/GenFds/Section.py b/BaseTools/Source/Python/GenFds/Section.py
index 463faa378165..5e0b4bee7d1c 100644
--- a/BaseTools/Source/Python/GenFds/Section.py
+++ b/BaseTools/Source/Python/GenFds/Section.py
@@ -116,17 +116,17 @@ class Section (SectionClassObject):
else :
IsSect = False
- if FileExtension != None:
+ if FileExtension is not None:
Suffix = FileExtension
elif IsSect :
Suffix = Section.SectionType.get(FileType)
else:
Suffix = Section.BinFileType.get(FileType)
- if FfsInf == None:
+ if FfsInf is None:
EdkLogger.error("GenFds", GENFDS_ERROR, 'Inf File does not exist!')
FileList = []
- if FileType != None:
+ if FileType is not None:
for File in FfsInf.BinFileList:
if File.Arch == "COMMON" or FfsInf.CurrentArch == File.Arch:
if File.Type == FileType or (int(FfsInf.PiSpecVersion, 16) >= 0x0001000A \
@@ -141,7 +141,7 @@ class Section (SectionClassObject):
else:
GenFdsGlobalVariable.InfLogger ("\nCurrent ARCH \'%s\' of File %s is not in the Support Arch Scope of %s specified by INF %s in FDF" %(FfsInf.CurrentArch, File.File, File.Arch, FfsInf.InfFileName))
- if (not IsMakefile and Suffix != None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix != None):
+ if (not IsMakefile and Suffix is not None and os.path.exists(FfsInf.EfiOutputPath)) or (IsMakefile and Suffix is not None):
#
# Get Makefile path and time stamp
#
diff --git a/BaseTools/Source/Python/GenFds/UiSection.py b/BaseTools/Source/Python/GenFds/UiSection.py
index 4f6926f7cae4..6340520602ee 100644
--- a/BaseTools/Source/Python/GenFds/UiSection.py
+++ b/BaseTools/Source/Python/GenFds/UiSection.py
@@ -52,16 +52,16 @@ class UiSection (UiSectionClassObject):
#
# Prepare the parameter of GenSection
#
- if FfsInf != None:
+ if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
self.FileName = FfsInf.__ExtendMacro__(self.FileName)
OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get('UI'))
- if self.StringData != None :
+ if self.StringData is not None :
NameString = self.StringData
- elif self.FileName != None:
+ elif self.FileName is not None:
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
FileObj = open(FileNameStr, 'r')
diff --git a/BaseTools/Source/Python/GenFds/VerSection.py b/BaseTools/Source/Python/GenFds/VerSection.py
index e29029980fad..11e974b9936e 100644
--- a/BaseTools/Source/Python/GenFds/VerSection.py
+++ b/BaseTools/Source/Python/GenFds/VerSection.py
@@ -52,7 +52,7 @@ class VerSection (VerSectionClassObject):
#
# Prepare the parameter of GenSection
#
- if FfsInf != None:
+ if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
@@ -64,9 +64,9 @@ class VerSection (VerSectionClassObject):
# Get String Data
StringData = ''
- if self.StringData != None:
+ if self.StringData is not None:
StringData = self.StringData
- elif self.FileName != None:
+ elif self.FileName is not None:
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
FileObj = open(FileNameStr, 'r')
diff --git a/BaseTools/Source/Python/GenFds/Vtf.py b/BaseTools/Source/Python/GenFds/Vtf.py
index 06e3d275c381..18ea37b9afdd 100644
--- a/BaseTools/Source/Python/GenFds/Vtf.py
+++ b/BaseTools/Source/Python/GenFds/Vtf.py
@@ -68,7 +68,7 @@ class Vtf (VtfClassObject):
FvList = self.GetFvList()
self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')
BsfInf = open(self.BsfInfName, 'w+')
- if self.ResetBin != None:
+ if self.ResetBin is not None:
BsfInf.writelines ("[OPTIONS]" + T_CHAR_LF)
BsfInf.writelines ("IA32_RST_BIN" + \
" = " + \
@@ -89,7 +89,7 @@ class Vtf (VtfClassObject):
'N' + \
T_CHAR_LF)
- elif ComponentObj.FilePos != None:
+ elif ComponentObj.FilePos is not None:
BsfInf.writelines ("COMP_LOC" + \
" = " + \
ComponentObj.FilePos + \
diff --git a/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py b/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
index fdad5a44dc3d..71895d4acddd 100644
--- a/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
+++ b/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
@@ -73,7 +73,7 @@ def _parseForXcode(lines, efifilepath):
if status == 1 and len(line) != 0:
if '_gPcd_BinaryPatch_' in line:
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*_gPcd_BinaryPatch_([\w]+))', line)
- if m != None:
+ if m is not None:
pcds.append((m.groups(0)[3], int(m.groups(0)[0], 16)))
return pcds
@@ -99,20 +99,20 @@ def _parseForGCC(lines, efifilepath):
# status handler
if status == 3:
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)
- if m != None:
+ if m is not None:
sections.append(m.groups(0))
if status == 3:
m = re.match('^.data._gPcd_BinaryPatch_([\w_\d]+)$', line)
- if m != None:
+ if m is not None:
if lines[index + 1]:
PcdName = m.groups(0)[0]
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', lines[index + 1].strip())
- if m != None:
+ if m is not None:
bpcds.append((PcdName, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return None
#redirection
redirection = 0
@@ -152,18 +152,18 @@ def _parseGeneral(lines, efifilepath):
continue
if status == 1 and len(line) != 0:
m = secRe.match(line)
- assert m != None, "Fail to parse the section in map file , line is %s" % line
+ assert m is not None, "Fail to parse the section in map file , line is %s" % line
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
if status == 2 and len(line) != 0:
m = symRe.match(line)
- assert m != None, "Fail to parse the symbol in map file, line is %s" % line
+ assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
sec_no = int(sec_no, 16)
sym_offset = int(sym_offset, 16)
vir_addr = int(vir_addr, 16)
m2 = re.match('^[_]+gPcd_BinaryPatch_([\w]+)', sym_name)
- if m2 != None:
+ if m2 is not None:
# fond a binary pcd entry in map file
for sec in secs:
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
@@ -173,7 +173,7 @@ def _parseGeneral(lines, efifilepath):
# get section information from efi file
efisecs = PeImageClass(efifilepath).SectionHeaderList
- if efisecs == None or len(efisecs) == 0:
+ if efisecs is None or len(efisecs) == 0:
return None
pcds = []
@@ -214,12 +214,12 @@ if __name__ == '__main__':
(options, args) = parser.parse_args()
- if options.mapfile == None or options.efifile == None:
+ if options.mapfile is None or options.efifile is None:
print parser.get_usage()
elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):
list = parsePcdInfoFromMapFile(options.mapfile, options.efifile)
- if list != None:
- if options.outfile != None:
+ if list is not None:
+ if options.outfile is not None:
generatePcdTable(list, options.outfile)
else:
generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))
diff --git a/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py b/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
index 942ba88d200f..0c8009cb0b44 100644
--- a/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
+++ b/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
@@ -267,13 +267,13 @@ def Main():
if not os.path.exists (InputFile):
EdkLogger.error("PatchPcdValue", FILE_NOT_FOUND, ExtraData=InputFile)
return 1
- if CommandOptions.PcdOffset == None or CommandOptions.PcdValue == None or CommandOptions.PcdTypeName == None:
+ if CommandOptions.PcdOffset is None or CommandOptions.PcdValue is None or CommandOptions.PcdTypeName is None:
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
return 1
if CommandOptions.PcdTypeName.upper() not in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64", "VOID*"]:
EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
return 1
- if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize == None:
+ if CommandOptions.PcdTypeName.upper() == "VOID*" and CommandOptions.PcdMaxSize is None:
EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")
return 1
#
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py
index bfdf763a7abc..ede9713c9b8b 100644
--- a/BaseTools/Source/Python/TargetTool/TargetTool.py
+++ b/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -85,7 +85,7 @@ class TargetTool():
for Key in KeyList:
if type(self.TargetTxtDictionary[Key]) == type([]):
print "%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key]))
- elif self.TargetTxtDictionary[Key] == None:
+ elif self.TargetTxtDictionary[Key] is None:
errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
else:
print "%-30s = %s" % (Key, self.TargetTxtDictionary[Key])
@@ -116,14 +116,14 @@ class TargetTool():
Line = "%-30s = \n" % Key
else:
ret = GetConfigureKeyValue(self, Key)
- if ret != None:
+ if ret is not None:
Line = ret
fw.write(Line)
for key in self.TargetTxtDictionary.keys():
if key not in existKeys:
print "Warning: %s does not exist in original configuration file" % key
Line = GetConfigureKeyValue(self, key)
- if Line == None:
+ if Line is None:
Line = "%-30s = " % key
fw.write(Line)
@@ -138,14 +138,14 @@ class TargetTool():
def GetConfigureKeyValue(self, Key):
Line = None
- if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE != None:
+ if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE is not None:
dscFullPath = os.path.join(self.WorkSpace, self.Opt.DSCFILE)
if os.path.exists(dscFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
else:
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
- elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE != None:
+ elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
if os.path.exists(tooldefFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
@@ -157,15 +157,15 @@ def GetConfigureKeyValue(self, Key):
Line = "%-30s = %s\n" % (Key, 'Enable')
elif self.Opt.NUM <= 1:
Line = "%-30s = %s\n" % (Key, 'Disable')
- elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM != None:
+ elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
- elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET != None:
+ elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
- elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH != None:
+ elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH is not None:
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET_ARCH))
- elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG != None:
+ elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG is not None:
Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_CHAIN_TAG)
- elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE != None:
+ elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE is not None:
buildruleFullPath = os.path.join(self.WorkSpace, self.Opt.BUILD_RULE_FILE)
if os.path.exists(buildruleFullPath):
Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
@@ -223,7 +223,7 @@ def MyOptionParser():
if __name__ == '__main__':
EdkLogger.Initialize()
EdkLogger.SetLevel(EdkLogger.QUIET)
- if os.getenv('WORKSPACE') == None:
+ if os.getenv('WORKSPACE') is None:
print "ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool"
sys.exit(1)
@@ -231,15 +231,15 @@ if __name__ == '__main__':
if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
print "The number of args isn't 1 or the value of args is invalid."
sys.exit(1)
- if opt.NUM != None and opt.NUM < 1:
+ if opt.NUM is not None and opt.NUM < 1:
print "The MAX_CONCURRENT_THREAD_NUMBER must be larger than 0."
sys.exit(1)
- if opt.TARGET != None and len(opt.TARGET) > 1:
+ if opt.TARGET is not None and len(opt.TARGET) > 1:
for elem in opt.TARGET:
if elem == '0':
print "0 will clear the TARGET setting in target.txt and can't combine with other value."
sys.exit(1)
- if opt.TARGET_ARCH != None and len(opt.TARGET_ARCH) > 1:
+ if opt.TARGET_ARCH is not None and len(opt.TARGET_ARCH) > 1:
for elem in opt.TARGET_ARCH:
if elem == '0':
print "0 will clear the TARGET_ARCH setting in target.txt and can't combine with other value."
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py
index d1e40b025caa..d07edbd5d872 100644
--- a/BaseTools/Source/Python/Trim/Trim.py
+++ b/BaseTools/Source/Python/Trim/Trim.py
@@ -173,7 +173,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
continue
- if LineIndexOfOriginalFile == None:
+ if LineIndexOfOriginalFile is None:
#
# Any non-empty lines must be from original preprocessed file.
# And this must be the first one.
@@ -193,7 +193,7 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
# convert Decimal number format
Line = gDecNumberPattern.sub(r"\1", Line)
- if LineNumber != None:
+ if LineNumber is not None:
EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
# in case preprocessor removed some lines, like blank or comment lines
if LineNumber <= len(NewLines):
@@ -216,10 +216,10 @@ def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
Brace = 0
for Index in range(len(Lines)):
Line = Lines[Index]
- if MulPatternFlag == False and gTypedef_MulPattern.search(Line) == None:
- if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) == None:
+ if MulPatternFlag == False and gTypedef_MulPattern.search(Line) is None:
+ if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) is None:
# remove "#pragram pack" directive
- if gPragmaPattern.search(Line) == None:
+ if gPragmaPattern.search(Line) is None:
NewLines.append(Line)
continue
elif SinglePatternFlag == False:
@@ -282,9 +282,9 @@ def TrimPreprocessedVfr(Source, Target):
Lines[Index] = "\n"
continue
- if FoundTypedef == False and gTypedefPattern.search(Line) == None:
+ if FoundTypedef == False and gTypedefPattern.search(Line) is None:
# keep "#pragram pack" directive
- if gPragmaPattern.search(Line) == None:
+ if gPragmaPattern.search(Line) is None:
Lines[Index] = "\n"
continue
elif FoundTypedef == False:
@@ -510,7 +510,7 @@ def TrimEdkSources(Source, Target):
for FileName in Files:
Dummy, Ext = os.path.splitext(FileName)
if Ext.upper() not in ['.C', '.H']: continue
- if Target == None or Target == '':
+ if Target is None or Target == '':
TrimEdkSourceCode(
os.path.join(CurrentDir, FileName),
os.path.join(CurrentDir, FileName)
@@ -568,7 +568,7 @@ def TrimEdkSourceCode(Source, Target):
NewLines = None
for Re,Repl in gImportCodePatterns:
- if NewLines == None:
+ if NewLines is None:
NewLines = Re.sub(Repl, Lines)
else:
NewLines = Re.sub(Repl, NewLines)
@@ -672,11 +672,11 @@ def Main():
try:
if CommandOptions.FileType == "Vfr":
- if CommandOptions.OutputFile == None:
+ if CommandOptions.OutputFile is None:
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
TrimPreprocessedVfr(InputFile, CommandOptions.OutputFile)
elif CommandOptions.FileType == "Asl":
- if CommandOptions.OutputFile == None:
+ if CommandOptions.OutputFile is None:
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
TrimAslFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile)
elif CommandOptions.FileType == "EdkSourceCode":
@@ -684,13 +684,13 @@ def Main():
elif CommandOptions.FileType == "VfrOffsetBin":
GenerateVfrBinSec(CommandOptions.ModuleName, CommandOptions.DebugDir, CommandOptions.OutputFile)
else :
- if CommandOptions.OutputFile == None:
+ if CommandOptions.OutputFile is None:
CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex, CommandOptions.TrimLong)
except FatalError, X:
import platform
import traceback
- if CommandOptions != None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
+ if CommandOptions is not None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
return 1
except:
diff --git a/BaseTools/Source/Python/UPT/Core/DependencyRules.py b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
index 26c5a97da80f..2af847ed2e0b 100644
--- a/BaseTools/Source/Python/UPT/Core/DependencyRules.py
+++ b/BaseTools/Source/Python/UPT/Core/DependencyRules.py
@@ -104,12 +104,12 @@ class DependencyRules(object):
# check whether satisfied by current distribution
#
if not Exist:
- if DpObj == None:
+ if DpObj is None:
Result = False
break
for GuidVerPair in DpObj.PackageSurfaceArea.keys():
if Dep.GetGuid() == GuidVerPair[0]:
- if Dep.GetVersion() == None or \
+ if Dep.GetVersion() is None or \
len(Dep.GetVersion()) == 0:
Result = True
break
diff --git a/BaseTools/Source/Python/UPT/Core/IpiDb.py b/BaseTools/Source/Python/UPT/Core/IpiDb.py
index f147963288ad..78d67ab31e1e 100644
--- a/BaseTools/Source/Python/UPT/Core/IpiDb.py
+++ b/BaseTools/Source/Python/UPT/Core/IpiDb.py
@@ -247,13 +247,13 @@ class IpiDatabase(object):
def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
RePackage):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
#
# Add newly installed DP information to DB.
#
- if NewDpFileName == None or len(NewDpFileName.strip()) == 0:
+ if NewDpFileName is None or len(NewDpFileName.strip()) == 0:
PkgFileName = 'N/A'
else:
PkgFileName = NewDpFileName
@@ -295,13 +295,13 @@ class IpiDatabase(object):
#
def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
- if DpGuid == None or len(DpGuid.strip()) == 0:
+ if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
- if DpVersion == None or len(DpVersion.strip()) == 0:
+ if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
@@ -325,13 +325,13 @@ class IpiDatabase(object):
def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
PkgVersion=None, Path=''):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
- if PkgGuid == None or len(PkgGuid.strip()) == 0:
+ if PkgGuid is None or len(PkgGuid.strip()) == 0:
PkgGuid = 'N/A'
- if PkgVersion == None or len(PkgVersion.strip()) == 0:
+ if PkgVersion is None or len(PkgVersion.strip()) == 0:
PkgVersion = 'N/A'
if os.name == 'posix':
@@ -361,13 +361,13 @@ class IpiDatabase(object):
def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
DpVersion=None, Path=''):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
- if DpGuid == None or len(DpGuid.strip()) == 0:
+ if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
- if DpVersion == None or len(DpVersion.strip()) == 0:
+ if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
@@ -391,10 +391,10 @@ class IpiDatabase(object):
def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
DepexVersion=None):
- if DepexGuid == None or len(DepexGuid.strip()) == 0:
+ if DepexGuid is None or len(DepexGuid.strip()) == 0:
DepexGuid = 'N/A'
- if DepexVersion == None or len(DepexVersion.strip()) == 0:
+ if DepexVersion is None or len(DepexVersion.strip()) == 0:
DepexVersion = 'N/A'
if os.name == 'posix':
@@ -510,7 +510,7 @@ class IpiDatabase(object):
#
def GetDp(self, Guid, Version):
- if Version == None or len(Version.strip()) == 0:
+ if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
(DpGuid, DpVersion) = (Guid, Version)
@@ -642,7 +642,7 @@ class IpiDatabase(object):
PackageVersion)
self.Cur.execute(SqlCommand)
- elif Version == None or len(Version.strip()) == 0:
+ elif Version is None or len(Version.strip()) == 0:
SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
(self.PkgTable, Guid)
diff --git a/BaseTools/Source/Python/UPT/Core/PackageFile.py b/BaseTools/Source/Python/UPT/Core/PackageFile.py
index 5fafd85bffbf..ec6f5503eaad 100644
--- a/BaseTools/Source/Python/UPT/Core/PackageFile.py
+++ b/BaseTools/Source/Python/UPT/Core/PackageFile.py
@@ -56,7 +56,7 @@ class PackageFile:
ExtraData="%s (%s)" % (FileName, str(Xstr)))
BadFile = self._ZipFile.testzip()
- if BadFile != None:
+ if BadFile is not None:
Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
ExtraData="[%s] in %s" % (BadFile, FileName))
diff --git a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
index d7eaf3ea1d12..9373a144190d 100644
--- a/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
+++ b/BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py
@@ -618,11 +618,11 @@ def GenSourceStatement(SourceFile, Family, FeatureFlag, TagName=None,
# format of SourceFile|Family|TagName|ToolCode|FeatureFlag
#
Statement += SourceFile
- if TagName == None:
+ if TagName is None:
TagName = ''
- if ToolCode == None:
+ if ToolCode is None:
ToolCode = ''
- if HelpStr == None:
+ if HelpStr is None:
HelpStr = ''
if FeatureFlag:
Statement += '|' + Family + '|' + TagName + '|' + ToolCode + '|' + FeatureFlag
diff --git a/BaseTools/Source/Python/UPT/InstallPkg.py b/BaseTools/Source/Python/UPT/InstallPkg.py
index a8d0e1ec440a..c0d56b55aacd 100644
--- a/BaseTools/Source/Python/UPT/InstallPkg.py
+++ b/BaseTools/Source/Python/UPT/InstallPkg.py
@@ -91,7 +91,7 @@ def InstallNewPackage(WorkspaceDir, Path, CustomPath = False):
# @param PathList: The already installed standalone module Path list
#
def InstallNewModule(WorkspaceDir, Path, PathList = None):
- if PathList == None:
+ if PathList is None:
PathList = []
Path = ConvertPath(Path)
Path = os.path.normpath(Path)
diff --git a/BaseTools/Source/Python/UPT/Library/CommentParsing.py b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
index e6d45103f94b..38f7012fd4f8 100644
--- a/BaseTools/Source/Python/UPT/Library/CommentParsing.py
+++ b/BaseTools/Source/Python/UPT/Library/CommentParsing.py
@@ -555,15 +555,15 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
# from HelpText
#
for Token in List[0:NumTokens]:
- if Usage == None and Token in UsageTokens:
+ if Usage is None and Token in UsageTokens:
Usage = UsageTokens[Token]
HelpText = HelpText.replace(Token, '')
- if Usage != None or not ParseVariable:
+ if Usage is not None or not ParseVariable:
for Token in List[0:NumTokens]:
- if Type == None and Token in TypeTokens:
+ if Type is None and Token in TypeTokens:
Type = TypeTokens[Token]
HelpText = HelpText.replace(Token, '')
- if Usage != None:
+ if Usage is not None:
for Token in List[0:NumTokens]:
if Token in RemoveTokens:
HelpText = HelpText.replace(Token, '')
@@ -571,13 +571,13 @@ def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable)
#
# If no Usage token is present and set Usage to UNDEFINED
#
- if Usage == None:
+ if Usage is None:
Usage = 'UNDEFINED'
#
# If no Type token is present and set Type to UNDEFINED
#
- if Type == None:
+ if Type is None:
Type = 'UNDEFINED'
#
diff --git a/BaseTools/Source/Python/UPT/Library/Misc.py b/BaseTools/Source/Python/UPT/Library/Misc.py
index 0d92cb3767c6..719445b3bd9a 100644
--- a/BaseTools/Source/Python/UPT/Library/Misc.py
+++ b/BaseTools/Source/Python/UPT/Library/Misc.py
@@ -120,7 +120,7 @@ def GuidStructureStringToGuidString(GuidValue):
# @param Directory: The directory name
#
def CreateDirectory(Directory):
- if Directory == None or Directory.strip() == "":
+ if Directory is None or Directory.strip() == "":
return True
try:
if not access(Directory, F_OK):
@@ -134,7 +134,7 @@ def CreateDirectory(Directory):
# @param Directory: The directory name
#
def RemoveDirectory(Directory, Recursively=False):
- if Directory == None or Directory.strip() == "" or not \
+ if Directory is None or Directory.strip() == "" or not \
os.path.exists(Directory):
return
if Recursively:
@@ -237,7 +237,7 @@ def GetNonMetaDataFiles(Root, SkipList, FullPath, PrefixPath):
#
def ValidFile(File, Ext=None):
File = File.replace('\\', '/')
- if Ext != None:
+ if Ext is not None:
FileExt = os.path.splitext(File)[1]
if FileExt.lower() != Ext.lower():
return False
@@ -423,7 +423,7 @@ class Sdict(IterableUserDict):
## update method
#
def update(self, Dict=None, **Kwargs):
- if Dict != None:
+ if Dict is not None:
for Key1, Val1 in Dict.items():
self[Key1] = Val1
if len(Kwargs):
@@ -529,7 +529,7 @@ class PathClass(object):
## _GetFileKey
#
def _GetFileKey(self):
- if self._Key == None:
+ if self._Key is None:
self._Key = self.Path.upper()
return self._Key
## Validate
diff --git a/BaseTools/Source/Python/UPT/Library/ParserValidate.py b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
index 028cf9a54f84..2def90a93b51 100644
--- a/BaseTools/Source/Python/UPT/Library/ParserValidate.py
+++ b/BaseTools/Source/Python/UPT/Library/ParserValidate.py
@@ -128,7 +128,7 @@ def IsValidInfComponentType(ComponentType):
#
def IsValidToolFamily(ToolFamily):
ReIsValieFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
- if ReIsValieFamily.match(ToolFamily) == None:
+ if ReIsValieFamily.match(ToolFamily) is None:
return False
return True
@@ -159,7 +159,7 @@ def IsValidArch(Arch):
if Arch == 'common':
return True
ReIsValieArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
- if ReIsValieArch.match(Arch) == None:
+ if ReIsValieArch.match(Arch) is None:
return False
return True
@@ -179,7 +179,7 @@ def IsValidFamily(Family):
return True
ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
- if ReIsValidFamily.match(Family) == None:
+ if ReIsValidFamily.match(Family) is None:
return False
return True
@@ -199,13 +199,13 @@ def IsValidBuildOptionName(BuildOptionName):
ReIsValidBuildOption1 = re.compile(r"^\s*(\*)|([A-Z][a-zA-Z0-9]*)$")
ReIsValidBuildOption2 = re.compile(r"^\s*(\*)|([a-zA-Z][a-zA-Z0-9]*)$")
- if ReIsValidBuildOption1.match(ToolOptionList[0]) == None:
+ if ReIsValidBuildOption1.match(ToolOptionList[0]) is None:
return False
- if ReIsValidBuildOption1.match(ToolOptionList[1]) == None:
+ if ReIsValidBuildOption1.match(ToolOptionList[1]) is None:
return False
- if ReIsValidBuildOption2.match(ToolOptionList[2]) == None:
+ if ReIsValidBuildOption2.match(ToolOptionList[2]) is None:
return False
if ToolOptionList[3] == "*" and ToolOptionList[4] not in ['FAMILY', 'DLL', 'DPATH']:
@@ -442,7 +442,7 @@ def IsValidDecVersion(Word):
ReIsValidDecVersion = re.compile(r"[0-9]+\.?[0-9]+$")
else:
ReIsValidDecVersion = re.compile(r"[0-9]+$")
- if ReIsValidDecVersion.match(Word) == None:
+ if ReIsValidDecVersion.match(Word) is None:
return False
return True
@@ -457,7 +457,7 @@ def IsValidDecVersion(Word):
#
def IsValidHexVersion(Word):
ReIsValidHexVersion = re.compile(r"[0][xX][0-9A-Fa-f]{8}$", re.DOTALL)
- if ReIsValidHexVersion.match(Word) == None:
+ if ReIsValidHexVersion.match(Word) is None:
return False
return True
@@ -471,7 +471,7 @@ def IsValidHexVersion(Word):
#
def IsValidBuildNumber(Word):
ReIsValieBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
- if ReIsValieBuildNumber.match(Word) == None:
+ if ReIsValieBuildNumber.match(Word) is None:
return False
return True
@@ -488,7 +488,7 @@ def IsValidDepex(Word):
return IsValidCFormatGuid(Word[Index+4:].strip())
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
- if ReIsValidCName.match(Word) == None:
+ if ReIsValidCName.match(Word) is None:
return False
return True
@@ -585,11 +585,11 @@ def IsValidPcdValue(PcdValue):
return True
ReIsValidIntegerSingle = re.compile(r"^\s*[0-9]\s*$", re.DOTALL)
- if ReIsValidIntegerSingle.match(PcdValue) != None:
+ if ReIsValidIntegerSingle.match(PcdValue) is not None:
return True
ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
- if ReIsValidIntegerMulti.match(PcdValue) != None:
+ if ReIsValidIntegerMulti.match(PcdValue) is not None:
return True
#
@@ -654,7 +654,7 @@ def IsValidPcdValue(PcdValue):
#
def IsValidCVariableName(CName):
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
- if ReIsValidCName.match(CName) == None:
+ if ReIsValidCName.match(CName) is None:
return False
return True
@@ -669,7 +669,7 @@ def IsValidCVariableName(CName):
#
def IsValidIdentifier(Ident):
ReIdent = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
- if ReIdent.match(Ident) == None:
+ if ReIdent.match(Ident) is None:
return False
return True
@@ -683,7 +683,7 @@ def IsValidIdentifier(Ident):
def IsValidDecVersionVal(Ver):
ReVersion = re.compile(r"[0-9]+(\.[0-9]{1,2})$")
- if ReVersion.match(Ver) == None:
+ if ReVersion.match(Ver) is None:
return False
return True
diff --git a/BaseTools/Source/Python/UPT/Library/Parsing.py b/BaseTools/Source/Python/UPT/Library/Parsing.py
index c34e7751442a..791e064761c0 100644
--- a/BaseTools/Source/Python/UPT/Library/Parsing.py
+++ b/BaseTools/Source/Python/UPT/Library/Parsing.py
@@ -134,7 +134,7 @@ def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1):
Format = '<TokenSpaceGuidCName>.<PcdCName>'
- if TokenInfoString != '' and TokenInfoString != None:
+ if TokenInfoString != '' and TokenInfoString is not None:
TokenInfoList = GetSplitValueList(TokenInfoString, DataType.TAB_SPLIT)
if len(TokenInfoList) == 2:
return True
@@ -433,7 +433,7 @@ def GetComponents(Lines, KeyValues, CommentCharacter):
LineList = Lines.split('\n')
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
- if Line == None or Line == '':
+ if Line is None or Line == '':
continue
if FindBlock == False:
@@ -921,7 +921,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
FileLocalMacros[Name] = Value
ReIsValidMacroName = re.compile(r"^[A-Z][A-Z0-9_]*$", re.DOTALL)
- if ReIsValidMacroName.match(Name) == None:
+ if ReIsValidMacroName.match(Name) is None:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MACRONAME_INVALID % (Name),
@@ -940,7 +940,7 @@ def MacroParser(Line, FileName, SectionType, FileLocalMacros):
# <UnicodeString>, <CArray> are subset of <AsciiString>.
#
ReIsValidMacroValue = re.compile(r"^[\x20-\x7e]*$", re.DOTALL)
- if ReIsValidMacroValue.match(Value) == None:
+ if ReIsValidMacroValue.match(Value) is None:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MACROVALUE_INVALID % (Value),
@@ -979,7 +979,7 @@ def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
else:
Section = '[' + SectionName + ']'
Content += '\n' + Section + '\n'
- if StatementList != None:
+ if StatementList is not None:
for Statement in StatementList:
LineList = Statement.split('\n')
NewStatement = ""
diff --git a/BaseTools/Source/Python/UPT/Library/String.py b/BaseTools/Source/Python/UPT/Library/String.py
index 278073e4a379..b79891ea1417 100644
--- a/BaseTools/Source/Python/UPT/Library/String.py
+++ b/BaseTools/Source/Python/UPT/Library/String.py
@@ -166,7 +166,7 @@ def SplitModuleType(Key):
#
def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None, FileName=None, Flag=False):
LastString = String
- if MacroDefinitions == None:
+ if MacroDefinitions is None:
MacroDefinitions = {}
while MacroDefinitions:
QuotedStringList = []
@@ -244,7 +244,7 @@ def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None
#
def NormPath(Path, Defines=None):
IsRelativePath = False
- if Defines == None:
+ if Defines is None:
Defines = {}
if Path:
if Path[0] == '.':
@@ -524,7 +524,7 @@ def PreCheck(FileName, FileContent, SupSectionTag):
# to be checked
#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
(Root, Ext) = os.path.splitext(CheckFilename)
if Ext.upper() != ExtName.upper() and Root:
ContainerFile = open(ContainerFilename, 'r').read()
@@ -552,7 +552,7 @@ def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line,
#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
CheckFile = ''
- if CheckFilename != '' and CheckFilename != None:
+ if CheckFilename != '' and CheckFilename is not None:
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
if not os.path.isfile(CheckFile):
ContainerFile = open(ContainerFilename, 'r').read()
diff --git a/BaseTools/Source/Python/UPT/Library/UniClassObject.py b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
index 0014a7561ba8..66eefee9db31 100644
--- a/BaseTools/Source/Python/UPT/Library/UniClassObject.py
+++ b/BaseTools/Source/Python/UPT/Library/UniClassObject.py
@@ -161,7 +161,7 @@ def GetLanguageCode1766(LangName, File=None):
for Key in gLANG_CONV_TABLE.keys():
if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
return Key
- if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None and LangName[3] == '-':
+ if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
for Key in gLANG_CONV_TABLE.keys():
if Key == LangName[0:3].lower():
return Key
@@ -186,7 +186,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if IsCompatibleMode:
if length == 3 and LangName.isalpha():
TempLangName = gLANG_CONV_TABLE.get(LangName.lower())
- if TempLangName != None:
+ if TempLangName is not None:
return TempLangName
return LangName
else:
@@ -200,7 +200,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
if LangName.isalpha():
return LangName
elif length == 3:
- if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None:
+ if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None:
return LangName
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
@@ -208,7 +208,7 @@ def GetLanguageCode(LangName, IsCompatibleMode, File):
elif length >= 6:
if LangName[0:2].isalpha() and LangName[2] == '-':
return LangName
- if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) == None and LangName[3] == '-':
+ if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
return LangName
EdkLogger.Error("Unicode File Parser",
@@ -270,14 +270,14 @@ class StringDefClassObject(object):
self.UseOtherLangDef = UseOtherLangDef
self.Length = 0
- if Name != None:
+ if Name is not None:
self.StringName = Name
self.StringNameByteList = UniToHexList(Name)
- if Value != None:
+ if Value is not None:
self.StringValue = Value
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
- if Token != None:
+ if Token is not None:
self.Token = Token
def __str__(self):
@@ -288,7 +288,7 @@ class StringDefClassObject(object):
repr(self.UseOtherLangDef)
def UpdateValue(self, Value = None):
- if Value != None:
+ if Value is not None:
if self.StringValue:
self.StringValue = self.StringValue + '\r\n' + Value
else:
@@ -393,7 +393,7 @@ class UniFileClassObject(object):
# Check the string name is the upper character
if Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
+ if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
'The string token name %s in UNI file %s must be upper case character.' %(Name, self.File))
@@ -798,7 +798,7 @@ class UniFileClassObject(object):
# Load a .uni file
#
def LoadUniFile(self, File = None):
- if File == None:
+ if File is None:
EdkLogger.Error("Unicode File Parser",
ToolError.PARSER_ERROR,
Message='No unicode file is given',
@@ -901,7 +901,7 @@ class UniFileClassObject(object):
IsAdded = True
if Name in self.OrderedStringDict[Language]:
IsAdded = False
- if Value != None:
+ if Value is not None:
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
diff --git a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
index d7614b884990..f20ae4dfa82f 100644
--- a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
+++ b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py
@@ -36,14 +36,14 @@ import Logger.Log as Logger
def CreateXmlElement(Name, String, NodeList, AttributeList):
Doc = xml.dom.minidom.Document()
Element = Doc.createElement(Name)
- if String != '' and String != None:
+ if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if type(Item) == type([]):
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Node = Doc.createElement(Key)
Node.appendChild(Doc.createTextNode(Value))
Element.appendChild(Node)
@@ -52,7 +52,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
for Item in AttributeList:
Key = Item[0]
Value = Item[1]
- if Key != '' and Key != None and Value != '' and Value != None:
+ if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
@@ -66,7 +66,7 @@ def CreateXmlElement(Name, String, NodeList, AttributeList):
# @param String A XPath style path.
#
def XmlList(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return []
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
@@ -101,7 +101,7 @@ def XmlList(Dom, String):
# @param String A XPath style path.
#
def XmlNode(Dom, String):
- if String == None or String == "" or Dom == None or Dom == "":
+ if String is None or String == "" or Dom is None or Dom == "":
return None
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
diff --git a/BaseTools/Source/Python/UPT/Logger/Log.py b/BaseTools/Source/Python/UPT/Logger/Log.py
index 407a1b32b6ee..ae06a1ae2a50 100644
--- a/BaseTools/Source/Python/UPT/Logger/Log.py
+++ b/BaseTools/Source/Python/UPT/Logger/Log.py
@@ -134,7 +134,7 @@ def Debug(Level, Message, ExtraData=None):
"msg" : Message,
}
- if ExtraData != None:
+ if ExtraData is not None:
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict + "\n %s" % ExtraData
else:
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict
@@ -165,10 +165,10 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
#
# if no tool name given, use caller's source file name as tool name
#
- if ToolName == None or ToolName == "":
+ if ToolName is None or ToolName == "":
ToolName = os.path.basename(extract_stack()[-2][0])
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
@@ -180,12 +180,12 @@ def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
"msg" : Message,
}
- if File != None:
+ if File is not None:
LogText = _WARNING_MESSAGE_TEMPLATE % TemplateDict
else:
LogText = _WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
- if ExtraData != None:
+ if ExtraData is not None:
LogText += "\n %s" % ExtraData
_INFO_LOGGER.log(WARN, LogText)
@@ -215,18 +215,18 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
ExtraData=None, RaiseError=IS_RAISE_ERROR):
if ToolName:
pass
- if Line == None:
+ if Line is None:
Line = "..."
else:
Line = "%d" % Line
- if Message == None:
+ if Message is None:
if ErrorCode in gERROR_MESSAGE:
Message = gERROR_MESSAGE[ErrorCode]
else:
Message = gERROR_MESSAGE[UNKNOWN_ERROR]
- if ExtraData == None:
+ if ExtraData is None:
ExtraData = ""
TemplateDict = {
@@ -238,7 +238,7 @@ def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
"extra" : ExtraData
}
- if File != None:
+ if File is not None:
LogText = _ERROR_MESSAGE_TEMPLATE % TemplateDict
else:
LogText = __ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
diff --git a/BaseTools/Source/Python/UPT/MkPkg.py b/BaseTools/Source/Python/UPT/MkPkg.py
index 87c84f0cc25b..ff9aa7fb117c 100644
--- a/BaseTools/Source/Python/UPT/MkPkg.py
+++ b/BaseTools/Source/Python/UPT/MkPkg.py
@@ -73,7 +73,7 @@ def CheckForExistingDp(Path):
#
#
def Main(Options = None):
- if Options == None:
+ if Options is None:
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
try:
DataBase = GlobalData.gDB
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
index f968beee6081..33b142d64e07 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py
@@ -271,7 +271,7 @@ class InfBinariesObject(InfSectionCommonDef):
#
pass
- if InfBianryVerItemObj != None:
+ if InfBianryVerItemObj is not None:
if self.Binaries.has_key((InfBianryVerItemObj)):
BinariesList = self.Binaries[InfBianryVerItemObj]
BinariesList.append((InfBianryVerItemObj, VerComment))
@@ -521,7 +521,7 @@ class InfBinariesObject(InfSectionCommonDef):
# #
# pass
- if InfBianryCommonItemObj != None:
+ if InfBianryCommonItemObj is not None:
if self.Binaries.has_key((InfBianryCommonItemObj)):
BinariesList = self.Binaries[InfBianryCommonItemObj]
BinariesList.append((InfBianryCommonItemObj, ItemComment))
@@ -538,11 +538,11 @@ class InfBinariesObject(InfSectionCommonDef):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
- if UiInf != None:
+ if UiInf is not None:
if len(UiInf) > 0:
#
# Check UI
@@ -672,7 +672,7 @@ class InfBinariesObject(InfSectionCommonDef):
# #
# pass
- if InfBianryUiItemObj != None:
+ if InfBianryUiItemObj is not None:
if self.Binaries.has_key((InfBianryUiItemObj)):
BinariesList = self.Binaries[InfBianryUiItemObj]
BinariesList.append((InfBianryUiItemObj, UiComment))
@@ -681,7 +681,7 @@ class InfBinariesObject(InfSectionCommonDef):
BinariesList = []
BinariesList.append((InfBianryUiItemObj, UiComment))
self.Binaries[InfBianryUiItemObj] = BinariesList
- if Ver != None and len(Ver) > 0:
+ if Ver is not None and len(Ver) > 0:
self.CheckVer(Ver, __SupArchList)
if CommonBinary and len(CommonBinary) > 0:
self.ParseCommonBinary(CommonBinary, __SupArchList)
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
index 1d074ee638fd..bbc797f65e37 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py
@@ -62,7 +62,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciVendorId != None:
+ if self.PciVendorId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_VENDOR_ID),
LineInfo=self.CurrentLine)
return False
@@ -86,7 +86,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciDeviceId != None:
+ if self.PciDeviceId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_DEVICE_ID),
LineInfo=self.CurrentLine)
return False
@@ -110,7 +110,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciClassCode != None:
+ if self.PciClassCode is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_CLASS_CODE),
LineInfo=self.CurrentLine)
return False
@@ -135,7 +135,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciRevision != None:
+ if self.PciRevision is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_REVISION),
LineInfo=self.CurrentLine)
return False
@@ -159,7 +159,7 @@ class InfDefSectionOptionRomInfo():
#
# Value has been set before.
#
- if self.PciCompress != None:
+ if self.PciCompress is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_COMPRESS),
LineInfo=self.CurrentLine)
return False
@@ -215,11 +215,11 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.BaseName != None:
+ if self.BaseName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_BASE_NAME),
LineInfo=self.CurrentLine)
return False
- if not (BaseName == '' or BaseName == None):
+ if not (BaseName == '' or BaseName is None):
if IsValidWord(BaseName) and not BaseName.startswith("_"):
self.BaseName = InfDefMember()
self.BaseName.SetValue(BaseName)
@@ -243,7 +243,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.FileGuid != None:
+ if self.FileGuid is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_FILE_GUID),
LineInfo=self.CurrentLine)
@@ -274,7 +274,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.ModuleType != None:
+ if self.ModuleType is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_MODULE_TYPE),
LineInfo=self.CurrentLine)
@@ -309,7 +309,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
def SetModuleUniFileName(self, ModuleUniFileName, Comments):
if Comments:
pass
- if self.ModuleUniFileName != None:
+ if self.ModuleUniFileName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_MODULE_UNI_FILE),
LineInfo=self.CurrentLine)
self.ModuleUniFileName = ModuleUniFileName
@@ -327,7 +327,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.InfVersion != None:
+ if self.InfVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_INF_VERSION),
LineInfo=self.CurrentLine)
@@ -368,7 +368,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.EdkReleaseVersion != None:
+ if self.EdkReleaseVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION),
LineInfo=self.CurrentLine)
@@ -401,7 +401,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.UefiSpecificationVersion != None:
+ if self.UefiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
@@ -434,7 +434,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.PiSpecificationVersion != None:
+ if self.PiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
@@ -495,7 +495,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.VersionString != None:
+ if self.VersionString is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_VERSION_STRING),
LineInfo=self.CurrentLine)
@@ -517,7 +517,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.PcdIsDriver != None:
+ if self.PcdIsDriver is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PCD_IS_DRIVER),
LineInfo=self.CurrentLine)
@@ -710,7 +710,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.Shadow != None:
+ if self.Shadow is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_SHADOW),
LineInfo=self.CurrentLine)
return False
@@ -731,7 +731,7 @@ class InfDefSection(InfDefSectionOptionRomInfo):
# <CustomMake> ::= [<Family> "|"] <Filename>
#
def SetCustomMakefile(self, CustomMakefile, Comments):
- if not (CustomMakefile == '' or CustomMakefile == None):
+ if not (CustomMakefile == '' or CustomMakefile is None):
ValueList = GetSplitValueList(CustomMakefile)
if len(ValueList) == 1:
FileName = ValueList[0]
@@ -811,12 +811,12 @@ class InfDefSection(InfDefSectionOptionRomInfo):
#
# Value has been set before.
#
- if self.UefiHiiResourceSection != None:
+ if self.UefiHiiResourceSection is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND
%(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION),
LineInfo=self.CurrentLine)
return False
- if not (UefiHiiResourceSection == '' or UefiHiiResourceSection == None):
+ if not (UefiHiiResourceSection == '' or UefiHiiResourceSection is None):
if (IsValidBoolType(UefiHiiResourceSection)):
self.UefiHiiResourceSection = InfDefMember()
self.UefiHiiResourceSection.SetValue(UefiHiiResourceSection)
@@ -948,7 +948,7 @@ class InfDefObject(InfSectionCommonDef):
RaiseError=True)
if Name == DT.TAB_INF_DEFINES_INF_VERSION:
HasFoundInfVersionFalg = True
- if not (Name == '' or Name == None):
+ if not (Name == '' or Name is None):
#
# Process "SPEC" Keyword definition.
#
@@ -971,7 +971,7 @@ class InfDefObject(InfSectionCommonDef):
LineInfo=LineInfo)
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
- if (ProcessFunc != None):
+ if (ProcessFunc is not None):
ProcessFunc(DefineList, Value, InfLineCommentObj)
self.Defines[ArchListString] = DefineList
else:
@@ -991,7 +991,7 @@ class InfDefObject(InfSectionCommonDef):
#
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
- if (ProcessFunc != None):
+ if (ProcessFunc is not None):
ProcessFunc(DefineList, Value, InfLineCommentObj)
self.Defines[ArchListString] = DefineList
#
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
index 23125552e06d..fb8d1f5a62ee 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py
@@ -107,7 +107,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
#
# Get/Set Usage and HelpString
#
- if CommentsList != None and len(CommentsList) != 0 :
+ if CommentsList is not None and len(CommentsList) != 0 :
CommentInsList = []
PreUsage = None
PreGuidType = None
@@ -126,7 +126,7 @@ def ParseGuidComment(CommentsList, InfGuidItemObj):
[],
True)
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -236,7 +236,7 @@ class InfGuidObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupportArchList.append(ArchItem)
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
index 45fba31aaae9..dce75063dfb5 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py
@@ -43,7 +43,7 @@ class InfHeaderObject():
# @param FileName: File Name
#
def SetFileName(self, FileName):
- if not (FileName == '' or FileName == None):
+ if not (FileName == '' or FileName is None):
self.FileName = FileName
return True
else:
@@ -59,7 +59,7 @@ class InfHeaderObject():
# @param Abstract: Abstract
#
def SetAbstract(self, Abstract):
- if not (Abstract == '' or Abstract == None):
+ if not (Abstract == '' or Abstract is None):
self.Abstract = Abstract
return True
else:
@@ -75,7 +75,7 @@ class InfHeaderObject():
# @param Description: Description content
#
def SetDescription(self, Description):
- if not (Description == '' or Description == None):
+ if not (Description == '' or Description is None):
self.Description = Description
return True
else:
@@ -91,7 +91,7 @@ class InfHeaderObject():
# @param Copyright: Copyright content
#
def SetCopyright(self, Copyright):
- if not (Copyright == '' or Copyright == None):
+ if not (Copyright == '' or Copyright is None):
self.Copyright = Copyright
return True
else:
@@ -107,7 +107,7 @@ class InfHeaderObject():
# @param License: License content
#
def SetLicense(self, License):
- if not (License == '' or License == None):
+ if not (License == '' or License is None):
self.License = License
return True
else:
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
index b18c4c381bc0..e588c6ba66d8 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py
@@ -38,10 +38,10 @@ def GetArchModuleType(KeyList):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
- if (ModuleItem == '' or ModuleItem == None):
+ if (ModuleItem == '' or ModuleItem is None):
ModuleItem = 'COMMON'
if ArchItem not in __SupArchList:
@@ -136,7 +136,7 @@ class InfLibraryClassObject():
LibItemObj.CurrentLine.SetLineNo(LibItem[2][1])
LibItemObj.CurrentLine.SetLineString(LibItem[2][0])
LibItem = LibItem[0]
- if HelpStringObj != None:
+ if HelpStringObj is not None:
LibItemObj.SetHelpString(HelpStringObj)
if len(LibItem) >= 1:
if LibItem[0].strip() != '':
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py b/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
index 74099e208860..37f8cb2336bb 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py
@@ -135,9 +135,9 @@ class InfSpecialCommentObject(InfSectionCommonDef):
# An encapsulate of Error for INF parser.
#
def ErrorInInf(Message=None, ErrorCode=None, LineInfo=None, RaiseError=True):
- if ErrorCode == None:
+ if ErrorCode is None:
ErrorCode = ToolError.FORMAT_INVALID
- if LineInfo == None:
+ if LineInfo is None:
LineInfo = ['', -1, '']
Logger.Error("InfParser",
ErrorCode,
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
index 37399134dbf3..01c854a8470e 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py
@@ -75,7 +75,7 @@ class InfPackageObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
SupArchList.append(ArchItem)
@@ -84,7 +84,7 @@ class InfPackageObject():
HelpStringObj = PackageItem[1]
CurrentLineOfPackItem = PackageItem[2]
PackageItem = PackageItem[0]
- if HelpStringObj != None:
+ if HelpStringObj is not None:
HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
PackageItemObj.SetHelpString(HelpString)
if len(PackageItem) >= 1:
@@ -183,5 +183,5 @@ class InfPackageObject():
return True
def GetPackages(self, Arch = None):
- if Arch == None:
+ if Arch is None:
return self.Packages
\ No newline at end of file
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
index 7b07036f91c2..d2712a97f2ff 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py
@@ -43,7 +43,7 @@ def ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
if PcdTypeItem1.upper != DT.TAB_INF_FEATURE_PCD.upper():
@@ -82,7 +82,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
if PcdTypeItem == 'FeaturePcd':
CommentItemUsage = DT.USAGE_ITEM_CONSUMES
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == 1:
@@ -96,7 +96,7 @@ def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
else:
continue
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentList) and CommentItemUsage == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -326,7 +326,7 @@ class InfPcdObject():
#
# Validate PcdType
#
- if (PcdTypeItem1 == '' or PcdTypeItem1 == None):
+ if (PcdTypeItem1 == '' or PcdTypeItem1 is None):
return False
else:
if not IsValidPcdType(PcdTypeItem1):
@@ -346,7 +346,7 @@ class InfPcdObject():
CurrentLineOfPcdItem = PcdItem[2]
PcdItem = PcdItem[0]
- if CommentList != None and len(CommentList) != 0:
+ if CommentList is not None and len(CommentList) != 0:
PcdItemObj = ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj)
else:
CommentItemIns = InfPcdItemCommentContent()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
index 4df62bb459ff..eb6b6927140b 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py
@@ -51,7 +51,7 @@ def ParsePpiComment(CommentsList, InfPpiItemObj):
if CommentItemString:
pass
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -213,7 +213,7 @@ class InfPpiObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
@@ -290,7 +290,7 @@ class InfPpiObject():
#
# Get/Set Usage and HelpString for PPI entry
#
- if CommentsList != None and len(CommentsList) != 0:
+ if CommentsList is not None and len(CommentsList) != 0:
InfPpiItemObj = ParsePpiComment(CommentsList, InfPpiItemObj)
else:
CommentItemIns = InfPpiItemCommentContent()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
index c94e53c98f87..eb03095d6fec 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py
@@ -49,7 +49,7 @@ def ParseProtocolComment(CommentsList, InfProtocolItemObj):
if CommentItemString:
pass
- if CommentItemHelpText == None:
+ if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
@@ -203,7 +203,7 @@ class InfProtocolObject():
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
@@ -259,7 +259,7 @@ class InfProtocolObject():
#
# Get/Set Usage and HelpString for Protocol entry
#
- if CommentsList != None and len(CommentsList) != 0:
+ if CommentsList is not None and len(CommentsList) != 0:
InfProtocolItemObj = ParseProtocolComment(CommentsList, InfProtocolItemObj)
else:
CommentItemIns = InfProtocolItemCommentContent()
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
index 9988f8ecfeed..2302dd5b9673 100644
--- a/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
+++ b/BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py
@@ -211,7 +211,7 @@ class InfSourcesObject(InfSectionCommonDef):
#
# Validate Arch
#
- if (ArchItem == '' or ArchItem == None):
+ if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
diff --git a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
index 498f2d7634a5..4eed04c01765 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py
@@ -155,7 +155,7 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
DT.MODEL_META_DATA_HEADER,
DefineSectionMacros)
- if Name != None:
+ if Name is not None:
DefineSectionMacros[Name] = Value
continue
@@ -168,7 +168,7 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
FileNameString,
DT.MODEL_META_DATA_PACKAGE,
DefineSectionMacros)
- if Name != None:
+ if Name is not None:
PackageSectionMacros[Name] = Value
continue
diff --git a/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
index f1d6943cbfff..f220402cb577 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py
@@ -112,7 +112,7 @@ class InfBinarySectionParser(InfParserSectionRoot):
if BinLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = BinLineContent[BinLineContent.find(DT.TAB_COMMENT_SPLIT):]
BinLineContent = BinLineContent[:BinLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
@@ -123,7 +123,7 @@ class InfBinarySectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_BINARY_FILE,
self.FileLocalMacros)
- if MacroDef[0] != None:
+ if MacroDef[0] is not None:
SectionMacros[MacroDef[0]] = MacroDef[1]
LineComment = None
HeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
index d00087a128a0..f7749d55a062 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py
@@ -133,7 +133,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
@@ -144,7 +144,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_META_DATA_HEADER,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
self.FileLocalMacros[Name] = Value
continue
@@ -173,7 +173,7 @@ class InfDefinSectionParser(InfParserSectionRoot):
Name, Value = _ValueList[0], _ValueList[1]
InfDefMemberObj = InfDefMember(Name, Value)
- if (LineComment != None):
+ if (LineComment is not None):
InfDefMemberObj.Comments.SetHeaderComments(LineComment.GetHeaderComments())
InfDefMemberObj.Comments.SetTailComments(LineComment.GetTailComments())
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
index 5cafc80ca5c3..332e2f014310 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py
@@ -87,7 +87,7 @@ class InfDepexSectionParser(InfParserSectionRoot):
ReFormatComment = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
for CommentItem in DepexComment:
CommentContent = CommentItem[0]
- if ReFormatComment.match(CommentContent) != None:
+ if ReFormatComment.match(CommentContent) is not None:
FormatCommentLn = CommentItem[1] + 1
continue
diff --git a/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
index 12ffedaaec61..956c116c6e79 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py
@@ -77,7 +77,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_GUID,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
CommentsList = []
ValueList = []
@@ -164,7 +164,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_PPI,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
@@ -334,7 +334,7 @@ class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_PROTOCOL,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
index 210f973f1a4d..549e67f08d64 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py
@@ -96,7 +96,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
if LibLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
LibTailComments = LibLineContent[LibLineContent.find(DT.TAB_COMMENT_SPLIT):]
LibLineContent = LibLineContent[:LibLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LibLineComment == None:
+ if LibLineComment is None:
LibLineComment = InfLineCommentObject()
LibLineComment.SetTailComments(LibTailComments)
@@ -107,7 +107,7 @@ class InfLibrarySectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_LIBRARY_CLASS,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
LibLineComment = None
LibHeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
index 67f1145322ad..8fb2898826e6 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py
@@ -89,7 +89,7 @@ class InfPackageSectionParser(InfParserSectionRoot):
if PkgLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = PkgLineContent[PkgLineContent.find(DT.TAB_COMMENT_SPLIT):]
PkgLineContent = PkgLineContent[:PkgLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
#
@@ -99,7 +99,7 @@ class InfPackageSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_META_DATA_PACKAGE,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
LineComment = None
HeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParser.py b/BaseTools/Source/Python/UPT/Parser/InfParser.py
index e7bef2e35e0f..7bea49e0e861 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfParser.py
@@ -97,7 +97,7 @@ class InfParser(InfSectionParser):
#
# Load Inf file if filename is not None
#
- if Filename != None:
+ if Filename is not None:
self.ParseInfFile(Filename)
## Parse INF file
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
index a416897d27ae..6a335e8b6c75 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py
@@ -73,9 +73,9 @@ gINF_SECTION_DEF = {
# @param Flag If the flag set to True, need to skip macros in a quoted string
#
def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Flag=False):
- if GlobalMacros == None:
+ if GlobalMacros is None:
GlobalMacros = {}
- if SectionMacros == None:
+ if SectionMacros is None:
SectionMacros = {}
FileName = LineInfo[0]
diff --git a/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
index f4324cc2ff1b..a9b87fdc0565 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py
@@ -95,7 +95,7 @@ class InfPcdSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_PCD,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
diff --git a/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
index 51db7960353d..645c2c341460 100644
--- a/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
+++ b/BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py
@@ -86,7 +86,7 @@ class InfSourceSectionParser(InfParserSectionRoot):
if SrcLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = SrcLineContent[SrcLineContent.find(DT.TAB_COMMENT_SPLIT):]
SrcLineContent = SrcLineContent[:SrcLineContent.find(DT.TAB_COMMENT_SPLIT)]
- if LineComment == None:
+ if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
@@ -97,7 +97,7 @@ class InfSourceSectionParser(InfParserSectionRoot):
FileName,
DT.MODEL_EFI_SOURCE_FILE,
self.FileLocalMacros)
- if Name != None:
+ if Name is not None:
SectionMacros[Name] = Value
LineComment = None
HeaderComments = []
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
index a15173285345..e37a0b6c3be7 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py
@@ -167,11 +167,11 @@ class InfPomAlignment(ModuleObject):
#
# Convert UEFI/PI version to decimal number
#
- if DefineObj.GetUefiSpecificationVersion() != None:
+ if DefineObj.GetUefiSpecificationVersion() is not None:
__UefiVersion = DefineObj.GetUefiSpecificationVersion().GetValue()
__UefiVersion = ConvertVersionToDecimal(__UefiVersion)
self.SetUefiSpecificationVersion(str(__UefiVersion))
- if DefineObj.GetPiSpecificationVersion() != None:
+ if DefineObj.GetPiSpecificationVersion() is not None:
__PiVersion = DefineObj.GetPiSpecificationVersion().GetValue()
__PiVersion = ConvertVersionToDecimal(__PiVersion)
@@ -186,7 +186,7 @@ class InfPomAlignment(ModuleObject):
# must exist items in INF define section
# MODULE_TYPE/BASE_NAME/INF_VERSION/FILE_GUID/VERSION_STRING
#
- if DefineObj.GetModuleType() == None:
+ if DefineObj.GetModuleType() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("MODULE_TYPE"), File=self.FullPath)
else:
@@ -205,7 +205,7 @@ class InfPomAlignment(ModuleObject):
Line=DefineObj.ModuleType.CurrentLine.LineNo,
ExtraData=DefineObj.ModuleType.CurrentLine.LineString)
self.LibModuleTypeList.append(ModuleType)
- if DefineObj.GetBaseName() == None:
+ if DefineObj.GetBaseName() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("BASE_NAME"), File=self.FullPath)
else:
@@ -214,17 +214,17 @@ class InfPomAlignment(ModuleObject):
self.UniFileClassObject = UniFileClassObject([PathClass(DefineObj.GetModuleUniFileName())])
else:
self.UniFileClassObject = None
- if DefineObj.GetInfVersion() == None:
+ if DefineObj.GetInfVersion() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("INF_VERSION"), File=self.FullPath)
else:
self.SetVersion(DefineObj.GetInfVersion().GetValue())
- if DefineObj.GetFileGuid() == None:
+ if DefineObj.GetFileGuid() is None:
Logger.Error("InfParser", FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("FILE_GUID"), File=self.FullPath)
else:
self.SetGuid(DefineObj.GetFileGuid().GetValue())
- if DefineObj.GetVersionString() == None:
+ if DefineObj.GetVersionString() is None:
#
# VERSION_STRING is missing from the [Defines] section, tools must assume that the module's version is 0.
#
@@ -256,7 +256,7 @@ class InfPomAlignment(ModuleObject):
if not (ModuleTypeValue == 'SEC' or ModuleTypeValue == 'PEI_CORE' or ModuleTypeValue == 'PEIM'):
Logger.Error("InfParser", FORMAT_INVALID, ST.ERR_INF_PARSER_DEFINE_SHADOW_INVALID, File=self.FullPath)
- if DefineObj.GetPcdIsDriver() != None:
+ if DefineObj.GetPcdIsDriver() is not None:
self.SetPcdIsDriver(DefineObj.GetPcdIsDriver().GetValue())
#
# LIBRARY_CLASS
@@ -499,7 +499,7 @@ class InfPomAlignment(ModuleObject):
LibraryClass.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
LibraryClass.SetSupModuleList(Item.GetSupModuleList())
HelpStringObj = Item.GetHelpString()
- if HelpStringObj != None:
+ if HelpStringObj is not None:
CommentString = GetHelpStringByRemoveHashKey(HelpStringObj.HeaderComments +
HelpStringObj.TailComments)
HelpTextHeaderObj = CommonObject.TextObject()
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
index 042d4784c84c..cca70e564042 100644
--- a/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
+++ b/BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py
@@ -45,7 +45,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
CustomMakefile = DefineObj.GetCustomMakefile()
UefiHiiResourceSection = DefineObj.GetUefiHiiResourceSection()
- if EdkReleaseVersion != None:
+ if EdkReleaseVersion is not None:
Name = DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION
Value = EdkReleaseVersion.GetValue()
Statement = _GenInfDefineStateMent(EdkReleaseVersion.Comments.GetHeaderComments(),
@@ -54,7 +54,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
EdkReleaseVersion.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if Shadow != None:
+ if Shadow is not None:
Name = DT.TAB_INF_DEFINES_SHADOW
Value = Shadow.GetValue()
Statement = _GenInfDefineStateMent(Shadow.Comments.GetHeaderComments(),
@@ -63,7 +63,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
Shadow.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if DpxSource != None:
+ if DpxSource is not None:
Name = DT.TAB_INF_DEFINES_DPX_SOURCE
for DpxSourceItem in DpxSource:
Value = DpxSourceItem[0]
@@ -73,7 +73,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
DpxSourceItem[1].GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciVendorId != None:
+ if PciVendorId is not None:
Name = DT.TAB_INF_DEFINES_PCI_VENDOR_ID
Value = PciVendorId.GetValue()
Statement = _GenInfDefineStateMent(PciVendorId.Comments.GetHeaderComments(),
@@ -82,7 +82,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciVendorId.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciDeviceId != None:
+ if PciDeviceId is not None:
Name = DT.TAB_INF_DEFINES_PCI_DEVICE_ID
Value = PciDeviceId.GetValue()
Statement = _GenInfDefineStateMent(PciDeviceId.Comments.GetHeaderComments(),
@@ -91,7 +91,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciDeviceId.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciClassCode != None:
+ if PciClassCode is not None:
Name = DT.TAB_INF_DEFINES_PCI_CLASS_CODE
Value = PciClassCode.GetValue()
Statement = _GenInfDefineStateMent(PciClassCode.Comments.GetHeaderComments(),
@@ -100,7 +100,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciClassCode.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciRevision != None:
+ if PciRevision is not None:
Name = DT.TAB_INF_DEFINES_PCI_REVISION
Value = PciRevision.GetValue()
Statement = _GenInfDefineStateMent(PciRevision.Comments.GetHeaderComments(),
@@ -109,7 +109,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
PciRevision.Comments.GetTailComments())
DefinesDictNew[Statement] = ArchString
- if PciCompress != None:
+ if PciCompress is not None:
Name = DT.TAB_INF_DEFINES_PCI_COMPRESS
Value = PciCompress.GetValue()
Statement = _GenInfDefineStateMent(PciCompress.Comments.GetHeaderComments(),
@@ -138,7 +138,7 @@ def GenModuleHeaderUserExt(DefineObj, ArchString):
DefinesDictNew[Statement] = ArchString
- if UefiHiiResourceSection != None:
+ if UefiHiiResourceSection is not None:
Name = DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION
Value = UefiHiiResourceSection.GetValue()
HeaderComment = UefiHiiResourceSection.Comments.GetHeaderComments()
diff --git a/BaseTools/Source/Python/UPT/UPT.py b/BaseTools/Source/Python/UPT/UPT.py
index 325b96bf560d..09653cdce95f 100644
--- a/BaseTools/Source/Python/UPT/UPT.py
+++ b/BaseTools/Source/Python/UPT/UPT.py
@@ -90,7 +90,7 @@ def SetLogLevel(Opt):
Logger.SetLevel(Logger.VERBOSE)
elif Opt.opt_quiet:
Logger.SetLevel(Logger.QUIET + 1)
- elif Opt.debug_level != None:
+ elif Opt.debug_level is not None:
if Opt.debug_level < 0 or Opt.debug_level > 9:
Logger.Warn("UPT", ST.ERR_DEBUG_LEVEL)
Logger.SetLevel(Logger.INFO)
diff --git a/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py b/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
index 4dc1c7edab3d..51ac48aca58e 100644
--- a/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
+++ b/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
@@ -550,7 +550,7 @@ class ModulePropertyXml(object):
Hob = Axml.FromXml(SubItem, 'HOB')
self.HOBs.append(Hob)
- if Header == None:
+ if Header is None:
Header = ModuleObject()
Header.SetModuleType(self.ModuleType)
diff --git a/BaseTools/Source/Python/UPT/Xml/XmlParser.py b/BaseTools/Source/Python/UPT/Xml/XmlParser.py
index 58959081d0ab..dba3b7f5892c 100644
--- a/BaseTools/Source/Python/UPT/Xml/XmlParser.py
+++ b/BaseTools/Source/Python/UPT/Xml/XmlParser.py
@@ -162,7 +162,7 @@ class DistributionPackageXml(object):
def FromXml(self, Filename=None):
- if Filename != None:
+ if Filename is not None:
self.DistP = DistributionPackageClass()
#
# Load to XML
@@ -227,7 +227,7 @@ class DistributionPackageXml(object):
def ToXml(self, DistP):
if self.DistP:
pass
- if DistP != None:
+ if DistP is not None:
#
# Parse DistributionPackageHeader
#
@@ -344,7 +344,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['Guids']
for Item in Module.GetGuidList():
- if Item == None:
+ if Item is None:
CheckDict = {'GuidCName':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -369,7 +369,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['Protocols']
for Item in Module.GetProtocolList():
- if Item == None:
+ if Item is None:
CheckDict = {'Protocol':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -384,7 +384,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['PPIs']
for Item in Module.GetPpiList():
- if Item == None:
+ if Item is None:
CheckDict = {'Ppi':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -399,7 +399,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['PcdCoded']
for Item in Module.GetPcdList():
- if Item == None:
+ if Item is None:
CheckDict = {'PcdEntry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -416,7 +416,7 @@ def ValidateMS1(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['Externs']
for Item in Module.GetExternList():
- if Item == None:
+ if Item is None:
CheckDict = {'Extern':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -536,7 +536,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['LibraryClassDefinitions']
for Item in Module.GetLibraryClassList():
- if Item == None:
+ if Item is None:
CheckDict = {'LibraryClass':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -608,7 +608,7 @@ def ValidateMS2(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['SourceFiles']
for Item in Module.GetSourceFileList():
- if Item == None:
+ if Item is None:
CheckDict = {'Filename':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -636,7 +636,7 @@ def ValidateMS3(Module, TopXmlTreeLevel):
#
XmlTreeLevel = TopXmlTreeLevel + ['PackageDependencies']
for Item in Module.GetPackageDependencyList():
- if Item == None:
+ if Item is None:
CheckDict = {'Package':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -649,7 +649,7 @@ def ValidateMS3(Module, TopXmlTreeLevel):
# Check BinaryFiles -> BinaryFile
#
for Item in Module.GetBinaryFileList():
- if Item == None:
+ if Item is None:
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles']
CheckDict = {'BinaryFile':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -772,7 +772,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ClonedFrom']
for Item in Package.GetClonedFromList():
- if Item == None:
+ if Item is None:
CheckDict = Sdict()
CheckDict['GUID'] = ''
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -787,7 +787,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'LibraryClassDeclarations']
for Item in Package.GetLibraryClassList():
- if Item == None:
+ if Item is None:
CheckDict = {'LibraryClass':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -802,7 +802,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'IndustryStandardIncludes']
for Item in Package.GetStandardIncludeFileList():
- if Item == None:
+ if Item is None:
CheckDict = {'IndustryStandardHeader':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -816,7 +816,7 @@ def ValidatePS1(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PackageIncludes']
for Item in Package.GetPackageIncludeFileList():
- if Item == None:
+ if Item is None:
CheckDict = {'PackageHeader':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -842,7 +842,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'GuidDeclarations']
for Item in Package.GetGuidList():
- if Item == None:
+ if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -857,7 +857,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ProtocolDeclarations']
for Item in Package.GetProtocolList():
- if Item == None:
+ if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -872,7 +872,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PpiDeclarations']
for Item in Package.GetPpiList():
- if Item == None:
+ if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
@@ -887,7 +887,7 @@ def ValidatePS2(Package):
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PcdDeclarations']
for Item in Package.GetPcdList():
- if Item == None:
+ if Item is None:
CheckDict = {'PcdEntry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
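
Throughout these hunks the change is mechanical, but it is not purely cosmetic: "== None" goes through an object's __eq__ method, while "is None" is a plain identity test against the None singleton, which is why PEP 8 recommends the identity form. A minimal, hypothetical illustration (not taken from BaseTools) of how an overridden __eq__ can make the equality form misleading:

    class Setting(object):
        # Hypothetical value holder whose __eq__ compares by value and
        # treats a missing value the same as None.
        def __init__(self, value=None):
            self.value = value
        def __eq__(self, other):
            other_value = other.value if isinstance(other, Setting) else other
            return self.value == other_value

    s = Setting()        # a real object whose value happens to be None
    print(s == None)     # True:  __eq__ is consulted and reports "equal"
    print(s is None)     # False: identity test, always unambiguous
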
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py
index a306dc0b2304..90c8246806d8 100644
--- a/BaseTools/Source/Python/Workspace/BuildClassObject.py
+++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py
@@ -212,7 +212,7 @@ class LibraryClassObject(object):
def __init__(self, Name = None, SupModList = [], Type = None):
self.LibraryClass = Name
self.SupModList = SupModList
- if Type != None:
+ if Type is not None:
self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
## ModuleBuildClassObject
diff --git a/BaseTools/Source/Python/Workspace/DecBuildData.py b/BaseTools/Source/Python/Workspace/DecBuildData.py
index 4d6edadc8f9d..49ef1df4aa76 100644
--- a/BaseTools/Source/Python/Workspace/DecBuildData.py
+++ b/BaseTools/Source/Python/Workspace/DecBuildData.py
@@ -107,7 +107,7 @@ class DecBuildData(PackageBuildClassObject):
## Get current effective macros
def _GetMacros(self):
- if self.__Macros == None:
+ if self.__Macros is None:
self.__Macros = {}
self.__Macros.update(GlobalData.gGlobalDefines)
return self.__Macros
@@ -145,34 +145,34 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve package name
def _GetPackageName(self):
- if self._PackageName == None:
- if self._Header == None:
+ if self._PackageName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._PackageName == None:
+ if self._PackageName is None:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
return self._PackageName
## Retrieve file guid
def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
+ if self._Guid is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Guid == None:
+ if self._Guid is None:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
return self._Guid
## Retrieve package version
def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
+ if self._Version is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Version == None:
+ if self._Version is None:
self._Version = ''
return self._Version
## Retrieve protocol definitions (name/value pairs)
def _GetProtocol(self):
- if self._Protocols == None:
+ if self._Protocols is None:
#
# tdict is a special kind of dict, used for selecting correct
# protocol defition for given ARCH
@@ -214,7 +214,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve PPI definitions (name/value pairs)
def _GetPpi(self):
- if self._Ppis == None:
+ if self._Ppis is None:
#
# tdict is a special kind of dict, used for selecting correct
# PPI defition for given ARCH
@@ -256,7 +256,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve GUID definitions (name/value pairs)
def _GetGuid(self):
- if self._Guids == None:
+ if self._Guids is None:
#
# tdict is a special kind of dict, used for selecting correct
# GUID defition for given ARCH
@@ -298,7 +298,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve public include paths declared in this package
def _GetInclude(self):
- if self._Includes == None or self._CommonIncludes is None:
+ if self._Includes is None or self._CommonIncludes is None:
self._CommonIncludes = []
self._Includes = []
self._PrivateIncludes = []
@@ -333,7 +333,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve library class declarations (not used in build at present)
def _GetLibraryClass(self):
- if self._LibraryClasses == None:
+ if self._LibraryClasses is None:
#
# tdict is a special kind of dict, used for selecting correct
# library class declaration for given ARCH
@@ -357,7 +357,7 @@ class DecBuildData(PackageBuildClassObject):
## Retrieve PCD declarations
def _GetPcds(self):
- if self._Pcds == None:
+ if self._Pcds is None:
self._Pcds = sdict()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
@@ -422,7 +422,7 @@ class DecBuildData(PackageBuildClassObject):
# will automatically turn to 'common' ARCH and try again
#
Setting,LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)
@@ -454,7 +454,7 @@ class DecBuildData(PackageBuildClassObject):
StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_]*$')
for pcd in Pcds.values():
if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
- if StructPattern.match(pcd.DatumType) == None:
+ if StructPattern.match(pcd.DatumType) is None:
EdkLogger.error('build', FORMAT_INVALID, "DatumType only support BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", pcd.DefinitionPosition[0],pcd.DefinitionPosition[1])
for struct_pcd in Pcds.values():
if isinstance(struct_pcd,StructurePcd) and not struct_pcd.StructuredPcdIncludeFile:
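
The DecBuildData getters above all follow the same lazy-initialization idiom: None is the "not parsed yet" sentinel, and the first access triggers _GetHeaderInfo(). A condensed sketch of that idiom, assuming a _Version field as in the real class:

    class _LazyHeader(object):
        def __init__(self):
            self._Version = None          # sentinel: header not parsed yet
        def _GetHeaderInfo(self):
            self._Version = ''            # a parsed value may be empty/falsy
        def _GetVersion(self):
            # Identity test: only the None sentinel triggers parsing; an
            # empty string is a valid cached result and must not re-parse.
            if self._Version is None:
                self._GetHeaderInfo()
            return self._Version

The identity test keeps the one-shot behaviour even when the cached value is legitimately empty or zero, which a truthiness check would not.
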
diff --git a/BaseTools/Source/Python/Workspace/DscBuildData.py b/BaseTools/Source/Python/Workspace/DscBuildData.py
index 1cc4dc966203..cf9608651269 100644
--- a/BaseTools/Source/Python/Workspace/DscBuildData.py
+++ b/BaseTools/Source/Python/Workspace/DscBuildData.py
@@ -298,7 +298,7 @@ class DscBuildData(PlatformBuildClassObject):
## Get current effective macros
def _GetMacros(self):
- if self.__Macros == None:
+ if self.__Macros is None:
self.__Macros = {}
self.__Macros.update(GlobalData.gPlatformDefines)
self.__Macros.update(GlobalData.gGlobalDefines)
@@ -368,7 +368,7 @@ class DscBuildData(PlatformBuildClassObject):
elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
self._BuildTargets = GetSplitValueList(Record[2])
elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
- if self._SkuName == None:
+ if self._SkuName is None:
self._SkuName = Record[2]
if GlobalData.gSKUID_CMD:
self._SkuName = GlobalData.gSKUID_CMD
@@ -427,76 +427,76 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve platform name
def _GetPlatformName(self):
- if self._PlatformName == None:
- if self._Header == None:
+ if self._PlatformName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._PlatformName == None:
+ if self._PlatformName is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
return self._PlatformName
## Retrieve file guid
def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
+ if self._Guid is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Guid == None:
+ if self._Guid is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_GUID", File=self.MetaFile)
return self._Guid
## Retrieve platform version
def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
+ if self._Version is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Version == None:
+ if self._Version is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_VERSION", File=self.MetaFile)
return self._Version
## Retrieve platform description file version
def _GetDscSpec(self):
- if self._DscSpecification == None:
- if self._Header == None:
+ if self._DscSpecification is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._DscSpecification == None:
+ if self._DscSpecification is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No DSC_SPECIFICATION", File=self.MetaFile)
return self._DscSpecification
## Retrieve OUTPUT_DIRECTORY
def _GetOutpuDir(self):
- if self._OutputDirectory == None:
- if self._Header == None:
+ if self._OutputDirectory is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._OutputDirectory == None:
+ if self._OutputDirectory is None:
self._OutputDirectory = os.path.join("Build", self._PlatformName)
return self._OutputDirectory
## Retrieve SUPPORTED_ARCHITECTURES
def _GetSupArch(self):
- if self._SupArchList == None:
- if self._Header == None:
+ if self._SupArchList is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._SupArchList == None:
+ if self._SupArchList is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No SUPPORTED_ARCHITECTURES", File=self.MetaFile)
return self._SupArchList
## Retrieve BUILD_TARGETS
def _GetBuildTarget(self):
- if self._BuildTargets == None:
- if self._Header == None:
+ if self._BuildTargets is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._BuildTargets == None:
+ if self._BuildTargets is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BUILD_TARGETS", File=self.MetaFile)
return self._BuildTargets
def _GetPcdInfoFlag(self):
- if self._PcdInfoFlag == None or self._PcdInfoFlag.upper() == 'FALSE':
+ if self._PcdInfoFlag is None or self._PcdInfoFlag.upper() == 'FALSE':
return False
elif self._PcdInfoFlag.upper() == 'TRUE':
return True
else:
return False
def _GetVarCheckFlag(self):
- if self._VarCheckFlag == None or self._VarCheckFlag.upper() == 'FALSE':
+ if self._VarCheckFlag is None or self._VarCheckFlag.upper() == 'FALSE':
return False
elif self._VarCheckFlag.upper() == 'TRUE':
return True
@@ -505,10 +505,10 @@ class DscBuildData(PlatformBuildClassObject):
# # Retrieve SKUID_IDENTIFIER
def _GetSkuName(self):
- if self._SkuName == None:
- if self._Header == None:
+ if self._SkuName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._SkuName == None:
+ if self._SkuName is None:
self._SkuName = 'DEFAULT'
return self._SkuName
@@ -517,72 +517,72 @@ class DscBuildData(PlatformBuildClassObject):
self._SkuName = Value
def _GetFdfFile(self):
- if self._FlashDefinition == None:
- if self._Header == None:
+ if self._FlashDefinition is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._FlashDefinition == None:
+ if self._FlashDefinition is None:
self._FlashDefinition = ''
return self._FlashDefinition
def _GetPrebuild(self):
- if self._Prebuild == None:
- if self._Header == None:
+ if self._Prebuild is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Prebuild == None:
+ if self._Prebuild is None:
self._Prebuild = ''
return self._Prebuild
def _GetPostbuild(self):
- if self._Postbuild == None:
- if self._Header == None:
+ if self._Postbuild is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._Postbuild == None:
+ if self._Postbuild is None:
self._Postbuild = ''
return self._Postbuild
## Retrieve FLASH_DEFINITION
def _GetBuildNumber(self):
- if self._BuildNumber == None:
- if self._Header == None:
+ if self._BuildNumber is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._BuildNumber == None:
+ if self._BuildNumber is None:
self._BuildNumber = ''
return self._BuildNumber
## Retrieve MAKEFILE_NAME
def _GetMakefileName(self):
- if self._MakefileName == None:
- if self._Header == None:
+ if self._MakefileName is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._MakefileName == None:
+ if self._MakefileName is None:
self._MakefileName = ''
return self._MakefileName
## Retrieve BsBaseAddress
def _GetBsBaseAddress(self):
- if self._BsBaseAddress == None:
- if self._Header == None:
+ if self._BsBaseAddress is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._BsBaseAddress == None:
+ if self._BsBaseAddress is None:
self._BsBaseAddress = ''
return self._BsBaseAddress
## Retrieve RtBaseAddress
def _GetRtBaseAddress(self):
- if self._RtBaseAddress == None:
- if self._Header == None:
+ if self._RtBaseAddress is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._RtBaseAddress == None:
+ if self._RtBaseAddress is None:
self._RtBaseAddress = ''
return self._RtBaseAddress
## Retrieve the top address for the load fix address
def _GetLoadFixAddress(self):
- if self._LoadFixAddress == None:
- if self._Header == None:
+ if self._LoadFixAddress is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._LoadFixAddress == None:
+ if self._LoadFixAddress is None:
self._LoadFixAddress = self._Macros.get(TAB_FIX_LOAD_TOP_MEMORY_ADDRESS, '0')
try:
@@ -608,33 +608,33 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve RFCLanguage filter
def _GetRFCLanguages(self):
- if self._RFCLanguages == None:
- if self._Header == None:
+ if self._RFCLanguages is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._RFCLanguages == None:
+ if self._RFCLanguages is None:
self._RFCLanguages = []
return self._RFCLanguages
## Retrieve ISOLanguage filter
def _GetISOLanguages(self):
- if self._ISOLanguages == None:
- if self._Header == None:
+ if self._ISOLanguages is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._ISOLanguages == None:
+ if self._ISOLanguages is None:
self._ISOLanguages = []
return self._ISOLanguages
## Retrieve the GUID string for VPD tool
def _GetVpdToolGuid(self):
- if self._VpdToolGuid == None:
- if self._Header == None:
+ if self._VpdToolGuid is None:
+ if self._Header is None:
self._GetHeaderInfo()
- if self._VpdToolGuid == None:
+ if self._VpdToolGuid is None:
self._VpdToolGuid = ''
return self._VpdToolGuid
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
- if self._SkuIds == None:
+ if self._SkuIds is None:
self._SkuIds = sdict()
RecordList = self._RawData[MODEL_EFI_SKU_ID, self._Arch]
for Record in RecordList:
@@ -646,7 +646,7 @@ class DscBuildData(PlatformBuildClassObject):
File=self.MetaFile, Line=Record[-1])
Pattern = re.compile('^[1-9]\d*|0$')
HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')
- if Pattern.match(Record[0]) == None and HexPattern.match(Record[0]) == None:
+ if Pattern.match(Record[0]) is None and HexPattern.match(Record[0]) is None:
EdkLogger.error('build', FORMAT_INVALID, "The format of the Sku ID number is invalid. It only support Integer and HexNumber",
File=self.MetaFile, Line=Record[-1])
if not IsValidWord(Record[1]):
@@ -661,7 +661,7 @@ class DscBuildData(PlatformBuildClassObject):
def ToInt(self,intstr):
return int(intstr,16) if intstr.upper().startswith("0X") else int(intstr)
def _GetDefaultStores(self):
- if self.DefaultStores == None:
+ if self.DefaultStores is None:
self.DefaultStores = sdict()
RecordList = self._RawData[MODEL_EFI_DEFAULT_STORES, self._Arch]
for Record in RecordList:
@@ -673,7 +673,7 @@ class DscBuildData(PlatformBuildClassObject):
File=self.MetaFile, Line=Record[-1])
Pattern = re.compile('^[1-9]\d*|0$')
HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')
- if Pattern.match(Record[0]) == None and HexPattern.match(Record[0]) == None:
+ if Pattern.match(Record[0]) is None and HexPattern.match(Record[0]) is None:
EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID number is invalid. It only support Integer and HexNumber",
File=self.MetaFile, Line=Record[-1])
if not IsValidWord(Record[1]):
@@ -689,7 +689,7 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve [Components] section information
def _GetModules(self):
- if self._Modules != None:
+ if self._Modules is not None:
return self._Modules
self._Modules = sdict()
@@ -788,13 +788,13 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve all possible library instances used in this platform
def _GetLibraryInstances(self):
- if self._LibraryInstances == None:
+ if self._LibraryInstances is None:
self._GetLibraryClasses()
return self._LibraryInstances
## Retrieve [LibraryClasses] information
def _GetLibraryClasses(self):
- if self._LibraryClasses == None:
+ if self._LibraryClasses is None:
self._LibraryInstances = []
#
# tdict is a special dict kind of type, used for selecting correct
@@ -832,7 +832,7 @@ class DscBuildData(PlatformBuildClassObject):
# try all possible module types
for ModuleType in SUP_MODULE_LIST:
LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass]
- if LibraryInstance == None:
+ if LibraryInstance is None:
continue
self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance
@@ -859,7 +859,7 @@ class DscBuildData(PlatformBuildClassObject):
return self._LibraryClasses
def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
- if self._DecPcds == None:
+ if self._DecPcds is None:
FdfInfList = []
if GlobalData.gFdfParser:
@@ -1121,7 +1121,7 @@ class DscBuildData(PlatformBuildClassObject):
## Retrieve all PCD settings in platform
def _GetPcds(self):
- if self._Pcds == None:
+ if self._Pcds is None:
self._Pcds = sdict()
self.__ParsePcdFromCommandLine()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
@@ -1156,7 +1156,7 @@ class DscBuildData(PlatformBuildClassObject):
print "PcdCName: %s, SkuName: %s, Value: %s" % (".".join((pcdobj.TokenSpaceGuidCName, pcdobj.TokenCName)), skuid,str(pcdobj.SkuInfoList[skuid].DefaultValue))
## Retrieve [BuildOptions]
def _GetBuildOptions(self):
- if self._BuildOptions == None:
+ if self._BuildOptions is None:
self._BuildOptions = sdict()
#
# Retrieve build option for EDKII and EDK style module
@@ -1178,7 +1178,7 @@ class DscBuildData(PlatformBuildClassObject):
return self._BuildOptions
def GetBuildOptionsByModuleType(self, Edk, ModuleType):
- if self._ModuleTypeOptions == None:
+ if self._ModuleTypeOptions is None:
self._ModuleTypeOptions = sdict()
if (Edk, ModuleType) not in self._ModuleTypeOptions:
options = sdict()
@@ -1471,7 +1471,7 @@ class DscBuildData(PlatformBuildClassObject):
for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdSet:
Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid, SkuName]
- if Setting == None:
+ if Setting is None:
continue
PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
if (PcdCName, TokenSpaceGuid) in PcdValueDict:
@@ -2256,7 +2256,7 @@ class DscBuildData(PlatformBuildClassObject):
for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
@@ -2428,7 +2428,7 @@ class DscBuildData(PlatformBuildClassObject):
for PcdCName, TokenSpaceGuid, SkuName,DefaultStore, Dummy4 in PcdSet:
Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid,DefaultStore]
- if Setting == None:
+ if Setting is None:
continue
VariableName, VariableGuid, VariableOffset, DefaultValue, VarAttribute = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
@@ -2498,7 +2498,7 @@ class DscBuildData(PlatformBuildClassObject):
pcd.DatumType = pcdDecObject.DatumType
# Only fix the value while no value provided in DSC file.
for sku in pcd.SkuInfoList.values():
- if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue == None):
+ if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue is None):
sku.HiiDefaultValue = pcdDecObject.DefaultValue
for default_store in sku.DefaultStoreDict:
sku.DefaultStoreDict[default_store]=pcdDecObject.DefaultValue
@@ -2582,7 +2582,7 @@ class DscBuildData(PlatformBuildClassObject):
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
#
# For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
@@ -2691,7 +2691,7 @@ class DscBuildData(PlatformBuildClassObject):
self.Pcds[Name, Guid].DefaultValue = Value
@property
def DecPcds(self):
- if self._DecPcds == None:
+ if self._DecPcds is None:
FdfInfList = []
if GlobalData.gFdfParser:
FdfInfList = GlobalData.gFdfParser.Profile.InfList
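
Several hunks above test re.match() results, for example the [SkuIds] and [DefaultStores] ID checks. re.match() returns either a match object or None, so the identity comparison is the natural way to ask "did it match at all?". A small standalone sketch of the same check (the sample inputs are made up):

    import re

    sku_pattern = re.compile(r'^[1-9]\d*|0$')
    hex_pattern = re.compile(r'0[xX][0-9a-fA-F]+$')

    for sku_id in ('0x1F', '7', 'DEFAULT'):
        # A failed match is reported as None, never as an empty string.
        if sku_pattern.match(sku_id) is None and hex_pattern.match(sku_id) is None:
            print('%s is not a valid SKU ID number' % sku_id)
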
diff --git a/BaseTools/Source/Python/Workspace/InfBuildData.py b/BaseTools/Source/Python/Workspace/InfBuildData.py
index 7ea9b56d5dec..ded8f610c9c1 100644
--- a/BaseTools/Source/Python/Workspace/InfBuildData.py
+++ b/BaseTools/Source/Python/Workspace/InfBuildData.py
@@ -179,7 +179,7 @@ class InfBuildData(ModuleBuildClassObject):
## Get current effective macros
def _GetMacros(self):
- if self.__Macros == None:
+ if self.__Macros is None:
self.__Macros = {}
# EDK_GLOBAL defined macros can be applied to EDK module
if self.AutoGenVersion < 0x00010005:
@@ -246,7 +246,7 @@ class InfBuildData(ModuleBuildClassObject):
# items defined _PROPERTY_ don't need additional processing
if Name in self:
self[Name] = Value
- if self._Defs == None:
+ if self._Defs is None:
self._Defs = sdict()
self._Defs[Name] = Value
self._Macros[Name] = Value
@@ -254,15 +254,15 @@ class InfBuildData(ModuleBuildClassObject):
elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
Name = 'UEFI_SPECIFICATION_VERSION'
- if self._Specification == None:
+ if self._Specification is None:
self._Specification = sdict()
self._Specification[Name] = GetHexVerValue(Value)
- if self._Specification[Name] == None:
+ if self._Specification[Name] is None:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
"'%s' format is not supported for %s" % (Value, Name),
File=self.MetaFile, Line=Record[-1])
elif Name == 'LIBRARY_CLASS':
- if self._LibraryClass == None:
+ if self._LibraryClass is None:
self._LibraryClass = []
ValueList = GetSplitValueList(Value)
LibraryClass = ValueList[0]
@@ -272,30 +272,30 @@ class InfBuildData(ModuleBuildClassObject):
SupModuleList = SUP_MODULE_LIST
self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList))
elif Name == 'ENTRY_POINT':
- if self._ModuleEntryPointList == None:
+ if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
self._ModuleEntryPointList.append(Value)
elif Name == 'UNLOAD_IMAGE':
- if self._ModuleUnloadImageList == None:
+ if self._ModuleUnloadImageList is None:
self._ModuleUnloadImageList = []
if not Value:
continue
self._ModuleUnloadImageList.append(Value)
elif Name == 'CONSTRUCTOR':
- if self._ConstructorList == None:
+ if self._ConstructorList is None:
self._ConstructorList = []
if not Value:
continue
self._ConstructorList.append(Value)
elif Name == 'DESTRUCTOR':
- if self._DestructorList == None:
+ if self._DestructorList is None:
self._DestructorList = []
if not Value:
continue
self._DestructorList.append(Value)
elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
TokenList = GetSplitValueList(Value)
- if self._CustomMakefile == None:
+ if self._CustomMakefile is None:
self._CustomMakefile = {}
if len(TokenList) < 2:
self._CustomMakefile['MSFT'] = TokenList[0]
@@ -307,7 +307,7 @@ class InfBuildData(ModuleBuildClassObject):
File=self.MetaFile, Line=Record[-1])
self._CustomMakefile[TokenList[0]] = TokenList[1]
else:
- if self._Defs == None:
+ if self._Defs is None:
self._Defs = sdict()
self._Defs[Name] = Value
self._Macros[Name] = Value
@@ -329,10 +329,10 @@ class InfBuildData(ModuleBuildClassObject):
EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
"MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
File=self.MetaFile, Line=LineNo)
- if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
+ if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
if self._ModuleType == SUP_MODULE_SMM_CORE:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)
- if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x00010032):
+ if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x00010032):
if self._ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.MetaFile)
if self._ModuleType == SUP_MODULE_MM_STANDALONE:
@@ -357,7 +357,7 @@ class InfBuildData(ModuleBuildClassObject):
if ErrorCode != 0:
EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
File=self.MetaFile, Line=LineNo)
- if self.Sources == None:
+ if self.Sources is None:
self._Sources = []
self._Sources.append(File)
else:
@@ -377,7 +377,7 @@ class InfBuildData(ModuleBuildClassObject):
for Name, Value, Dummy, Arch, Platform, ID, LineNo in RecordList:
Value = ReplaceMacro(Value, Macros, True)
if Name == "IMAGE_ENTRY_POINT":
- if self._ModuleEntryPointList == None:
+ if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
self._ModuleEntryPointList.append(Value)
elif Name == "DPX_SOURCE":
@@ -387,7 +387,7 @@ class InfBuildData(ModuleBuildClassObject):
if ErrorCode != 0:
EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
File=self.MetaFile, Line=LineNo)
- if self.Sources == None:
+ if self.Sources is None:
self._Sources = []
self._Sources.append(File)
else:
@@ -397,7 +397,7 @@ class InfBuildData(ModuleBuildClassObject):
# EdkLogger.warn("build", "Don't know how to do with macro [%s]" % Name,
# File=self.MetaFile, Line=LineNo)
else:
- if self._BuildOptions == None:
+ if self._BuildOptions is None:
self._BuildOptions = sdict()
if ToolList[0] in self._TOOL_CODE_:
@@ -424,7 +424,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve file version
def _GetInfVersion(self):
- if self._AutoGenVersion == None:
+ if self._AutoGenVersion is None:
RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
for Record in RecordList:
if Record[1] == TAB_INF_DEFINES_INF_VERSION:
@@ -436,34 +436,34 @@ class InfBuildData(ModuleBuildClassObject):
else:
self._AutoGenVersion = int(Record[2], 0)
break
- if self._AutoGenVersion == None:
+ if self._AutoGenVersion is None:
self._AutoGenVersion = 0x00010000
return self._AutoGenVersion
## Retrieve BASE_NAME
def _GetBaseName(self):
- if self._BaseName == None:
- if self._Header_ == None:
+ if self._BaseName is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._BaseName == None:
+ if self._BaseName is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BASE_NAME name", File=self.MetaFile)
return self._BaseName
## Retrieve DxsFile
def _GetDxsFile(self):
- if self._DxsFile == None:
- if self._Header_ == None:
+ if self._DxsFile is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._DxsFile == None:
+ if self._DxsFile is None:
self._DxsFile = ''
return self._DxsFile
## Retrieve MODULE_TYPE
def _GetModuleType(self):
- if self._ModuleType == None:
- if self._Header_ == None:
+ if self._ModuleType is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ModuleType == None:
+ if self._ModuleType is None:
self._ModuleType = 'BASE'
if self._ModuleType not in SUP_MODULE_LIST:
self._ModuleType = "USER_DEFINED"
@@ -471,17 +471,17 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve COMPONENT_TYPE
def _GetComponentType(self):
- if self._ComponentType == None:
- if self._Header_ == None:
+ if self._ComponentType is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ComponentType == None:
+ if self._ComponentType is None:
self._ComponentType = 'USER_DEFINED'
return self._ComponentType
## Retrieve "BUILD_TYPE"
def _GetBuildType(self):
- if self._BuildType == None:
- if self._Header_ == None:
+ if self._BuildType is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
if not self._BuildType:
self._BuildType = "BASE"
@@ -489,37 +489,37 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve file guid
def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header_ == None:
+ if self._Guid is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Guid == None:
+ if self._Guid is None:
self._Guid = '00000000-0000-0000-0000-000000000000'
return self._Guid
## Retrieve module version
def _GetVersion(self):
- if self._Version == None:
- if self._Header_ == None:
+ if self._Version is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Version == None:
+ if self._Version is None:
self._Version = '0.0'
return self._Version
## Retrieve PCD_IS_DRIVER
def _GetPcdIsDriver(self):
- if self._PcdIsDriver == None:
- if self._Header_ == None:
+ if self._PcdIsDriver is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._PcdIsDriver == None:
+ if self._PcdIsDriver is None:
self._PcdIsDriver = ''
return self._PcdIsDriver
## Retrieve SHADOW
def _GetShadow(self):
- if self._Shadow == None:
- if self._Header_ == None:
+ if self._Shadow is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Shadow != None and self._Shadow.upper() == 'TRUE':
+ if self._Shadow is not None and self._Shadow.upper() == 'TRUE':
self._Shadow = True
else:
self._Shadow = False
@@ -527,79 +527,79 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve CUSTOM_MAKEFILE
def _GetMakefile(self):
- if self._CustomMakefile == None:
- if self._Header_ == None:
+ if self._CustomMakefile is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._CustomMakefile == None:
+ if self._CustomMakefile is None:
self._CustomMakefile = {}
return self._CustomMakefile
## Retrieve EFI_SPECIFICATION_VERSION
def _GetSpec(self):
- if self._Specification == None:
- if self._Header_ == None:
+ if self._Specification is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Specification == None:
+ if self._Specification is None:
self._Specification = {}
return self._Specification
## Retrieve LIBRARY_CLASS
def _GetLibraryClass(self):
- if self._LibraryClass == None:
- if self._Header_ == None:
+ if self._LibraryClass is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._LibraryClass == None:
+ if self._LibraryClass is None:
self._LibraryClass = []
return self._LibraryClass
## Retrieve ENTRY_POINT
def _GetEntryPoint(self):
- if self._ModuleEntryPointList == None:
- if self._Header_ == None:
+ if self._ModuleEntryPointList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ModuleEntryPointList == None:
+ if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
return self._ModuleEntryPointList
## Retrieve UNLOAD_IMAGE
def _GetUnloadImage(self):
- if self._ModuleUnloadImageList == None:
- if self._Header_ == None:
+ if self._ModuleUnloadImageList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ModuleUnloadImageList == None:
+ if self._ModuleUnloadImageList is None:
self._ModuleUnloadImageList = []
return self._ModuleUnloadImageList
## Retrieve CONSTRUCTOR
def _GetConstructor(self):
- if self._ConstructorList == None:
- if self._Header_ == None:
+ if self._ConstructorList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._ConstructorList == None:
+ if self._ConstructorList is None:
self._ConstructorList = []
return self._ConstructorList
## Retrieve DESTRUCTOR
def _GetDestructor(self):
- if self._DestructorList == None:
- if self._Header_ == None:
+ if self._DestructorList is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._DestructorList == None:
+ if self._DestructorList is None:
self._DestructorList = []
return self._DestructorList
## Retrieve definies other than above ones
def _GetDefines(self):
- if self._Defs == None:
- if self._Header_ == None:
+ if self._Defs is None:
+ if self._Header_ is None:
self._GetHeaderInfo()
- if self._Defs == None:
+ if self._Defs is None:
self._Defs = sdict()
return self._Defs
## Retrieve binary files
def _GetBinaries(self):
- if self._Binaries == None:
+ if self._Binaries is None:
self._Binaries = []
RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform]
Macros = self._Macros
@@ -646,7 +646,7 @@ class InfBuildData(ModuleBuildClassObject):
self._Sources = []
return self._Sources
- if self._Sources == None:
+ if self._Sources is None:
self._Sources = []
RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
Macros = self._Macros
@@ -687,7 +687,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve library classes employed by this module
def _GetLibraryClassUses(self):
- if self._LibraryClasses == None:
+ if self._LibraryClasses is None:
self._LibraryClasses = sdict()
RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
for Record in RecordList:
@@ -700,7 +700,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve library names (for Edk.x style of modules)
def _GetLibraryNames(self):
- if self._Libraries == None:
+ if self._Libraries is None:
self._Libraries = []
RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
for Record in RecordList:
@@ -716,14 +716,14 @@ class InfBuildData(ModuleBuildClassObject):
return self._ProtocolComments
## Retrieve protocols consumed/produced by this module
def _GetProtocols(self):
- if self._Protocols == None:
+ if self._Protocols is None:
self._Protocols = sdict()
self._ProtocolComments = sdict()
RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
for Record in RecordList:
CName = Record[0]
Value = ProtocolValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of Protocol [%s] is not found under [Protocols] section in" % CName,
@@ -741,14 +741,14 @@ class InfBuildData(ModuleBuildClassObject):
return self._PpiComments
## Retrieve PPIs consumed/produced by this module
def _GetPpis(self):
- if self._Ppis == None:
+ if self._Ppis is None:
self._Ppis = sdict()
self._PpiComments = sdict()
RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
for Record in RecordList:
CName = Record[0]
Value = PpiValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of PPI [%s] is not found under [Ppis] section in " % CName,
@@ -766,14 +766,14 @@ class InfBuildData(ModuleBuildClassObject):
return self._GuidComments
## Retrieve GUIDs consumed/produced by this module
def _GetGuids(self):
- if self._Guids == None:
+ if self._Guids is None:
self._Guids = sdict()
self._GuidComments = sdict()
RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
for Record in RecordList:
CName = Record[0]
Value = GuidValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of Guid [%s] is not found under [Guids] section in" % CName,
@@ -788,7 +788,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve include paths necessary for this module (for Edk.x style of modules)
def _GetIncludes(self):
- if self._Includes == None:
+ if self._Includes is None:
self._Includes = []
if self._SourceOverridePath:
self._Includes.append(self._SourceOverridePath)
@@ -845,7 +845,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve packages this module depends on
def _GetPackages(self):
- if self._Packages == None:
+ if self._Packages is None:
self._Packages = []
RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform]
Macros = self._Macros
@@ -868,7 +868,7 @@ class InfBuildData(ModuleBuildClassObject):
return self._PcdComments
## Retrieve PCDs used in this module
def _GetPcds(self):
- if self._Pcds == None:
+ if self._Pcds is None:
self._Pcds = sdict()
self._PcdComments = sdict()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
@@ -880,7 +880,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve build options specific to this module
def _GetBuildOptions(self):
- if self._BuildOptions == None:
+ if self._BuildOptions is None:
self._BuildOptions = sdict()
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform]
for Record in RecordList:
@@ -897,13 +897,13 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve dependency expression
def _GetDepex(self):
- if self._Depex == None:
+ if self._Depex is None:
self._Depex = tdict(False, 2)
RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
# If the module has only Binaries and no Sources, then ignore [Depex]
- if self.Sources == None or self.Sources == []:
- if self.Binaries != None and self.Binaries != []:
+ if self.Sources is None or self.Sources == []:
+ if self.Binaries is not None and self.Binaries != []:
return self._Depex
# PEIM and DXE drivers must have a valid [Depex] section
@@ -935,18 +935,18 @@ class InfBuildData(ModuleBuildClassObject):
elif Token.endswith(".inf"): # module file name
ModuleFile = os.path.normpath(Token)
Module = self.BuildDatabase[ModuleFile]
- if Module == None:
+ if Module is None:
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "Module is not found in active platform",
ExtraData=Token, File=self.MetaFile, Line=Record[-1])
DepexList.append(Module.Guid)
else:
# get the GUID value now
Value = ProtocolValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
Value = PpiValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
Value = GuidValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of [%s] is not found in" % Token,
@@ -958,7 +958,7 @@ class InfBuildData(ModuleBuildClassObject):
## Retrieve depedency expression
def _GetDepexExpression(self):
- if self._DepexExpression == None:
+ if self._DepexExpression is None:
self._DepexExpression = tdict(False, 2)
RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
DepexExpression = sdict()
@@ -989,7 +989,7 @@ class InfBuildData(ModuleBuildClassObject):
# get the guid value
if TokenSpaceGuid not in self.Guids:
Value = GuidValue(TokenSpaceGuid, self.Packages, self.MetaFile.Path)
- if Value == None:
+ if Value is None:
PackageList = "\n\t".join([str(P) for P in self.Packages])
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
"Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid,
@@ -1006,7 +1006,7 @@ class InfBuildData(ModuleBuildClassObject):
for PcdCName, TokenSpaceGuid in PcdList:
PcdRealName = PcdCName
Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
- if Setting == None:
+ if Setting is None:
continue
ValueList = AnalyzePcdData(Setting)
DefaultValue = ValueList[0]
@@ -1095,7 +1095,7 @@ class InfBuildData(ModuleBuildClassObject):
#
# Check whether the token value exist or not.
#
- if Pcd.TokenValue == None or Pcd.TokenValue == "":
+ if Pcd.TokenValue is None or Pcd.TokenValue == "":
EdkLogger.error(
'build',
FORMAT_INVALID,
@@ -1108,7 +1108,7 @@ class InfBuildData(ModuleBuildClassObject):
#
ReIsValidPcdTokenValue = re.compile(r"^[0][x|X][0]*[0-9a-fA-F]{1,8}$", re.DOTALL)
if Pcd.TokenValue.startswith("0x") or Pcd.TokenValue.startswith("0X"):
- if ReIsValidPcdTokenValue.match(Pcd.TokenValue) == None:
+ if ReIsValidPcdTokenValue.match(Pcd.TokenValue) is None:
EdkLogger.error(
'build',
FORMAT_INVALID,
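
The [Depex] hunk above distinguishes three states of self.Sources: None (never populated), an empty list (populated but empty), and a non-empty list, so the test cannot simply rely on truthiness. A hypothetical helper (not part of InfBuildData) spelling out the same rule:

    def skip_depex(sources, binaries):
        # Mirrors the check in _GetDepex(): only a module with no source
        # files at all but with binary files gets its [Depex] ignored.
        return (sources is None or sources == []) and \
               (binaries is not None and binaries != [])

    print(skip_depex(None, ['Driver.efi']))     # True:  binary-only module
    print(skip_depex(['Driver.c'], []))         # False: has sources
    print(skip_depex([], None))                 # False: no binaries either
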
diff --git a/BaseTools/Source/Python/Workspace/MetaDataTable.py b/BaseTools/Source/Python/Workspace/MetaDataTable.py
index ee4ba6869f80..0cfec9023261 100644
--- a/BaseTools/Source/Python/Workspace/MetaDataTable.py
+++ b/BaseTools/Source/Python/Workspace/MetaDataTable.py
@@ -113,7 +113,7 @@ class Table(object):
SqlCommand = """select max(ID) from %s""" % self.Table
Record = self.Cur.execute(SqlCommand).fetchall()
Id = Record[0][0]
- if Id == None:
+ if Id is None:
Id = self.IdBase
return Id
@@ -311,7 +311,7 @@ class TableDataModel(Table):
def InitTable(self):
EdkLogger.verbose("\nInitialize table DataModel started ...")
Count = self.GetCount()
- if Count != None and Count != 0:
+ if Count is not None and Count != 0:
return
for Item in DataClass.MODEL_LIST:
CrossIndex = Item[1]
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 2eb4c75e4658..f4c1868483d9 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -241,7 +241,7 @@ class MetaFileParser(object):
self.Start()
# No specific ARCH or Platform given, use raw data
- if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch)
# Do post-process if necessary
@@ -620,7 +620,7 @@ class InfParser(MetaFileParser):
self._ValueList = ['', '', '']
# parse current line, result will be put in self._ValueList
self._SectionParser[self._SectionType](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
Comments = []
continue
@@ -952,7 +952,7 @@ class DscParser(MetaFileParser):
self._ValueList = ['', '', '']
self._SectionParser[SectionType](self)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
#
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
@@ -1361,7 +1361,7 @@ class DscParser(MetaFileParser):
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
- if self._ValueList == None:
+ if self._ValueList is None:
continue
NewOwner = self._IdMapping.get(Owner, -1)
@@ -1740,7 +1740,7 @@ class DecParser(MetaFileParser):
# section content
self._ValueList = ['', '', '']
self._SectionParser[self._SectionType[0]](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
self._Comments = []
continue
diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py
index d8549c9d66e6..be3fb3d68856 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileTable.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileTable.py
@@ -140,11 +140,11 @@ class ModuleTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
- if Platform != None and Platform != 'COMMON':
+ if Platform is not None and Platform != 'COMMON':
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
- if BelongsToItem != None:
+ if BelongsToItem is not None:
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -221,7 +221,7 @@ class PackageTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>=0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
- if Arch != None and Arch != 'COMMON':
+ if Arch is not None and Arch != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
@@ -341,9 +341,9 @@ class PlatformTable(MetaFileTable):
ConditionString = "Model=%s AND Enabled>0" % Model
ValueString = "Value1,Value2,Value3,Scope1,Scope2,Scope3,ID,StartLine"
- if Scope1 != None and Scope1 != 'COMMON':
+ if Scope1 is not None and Scope1 != 'COMMON':
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
- if Scope2 != None and Scope2 != 'COMMON':
+ if Scope2 is not None and Scope2 != 'COMMON':
# Cover the case that CodeBase is 'COMMON' for BuildOptions section
if '.' in Scope2:
Index = Scope2.index('.')
@@ -352,12 +352,12 @@ class PlatformTable(MetaFileTable):
else:
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
- if BelongsToItem != None:
+ if BelongsToItem is not None:
ConditionString += " AND BelongsToItem=%s" % BelongsToItem
else:
ConditionString += " AND BelongsToItem<0"
- if FromItem != None:
+ if FromItem is not None:
ConditionString += " AND FromItem=%s" % FromItem
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
index c760e57b8f64..abe34cf9a071 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
@@ -118,16 +118,16 @@ def _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, To
LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
else:
LibraryPath = Platform.LibraryClasses[LibraryClassName, ModuleType]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
LibraryPath = M.LibraryClasses[LibraryClassName]
- if LibraryPath == None or LibraryPath == "":
+ if LibraryPath is None or LibraryPath == "":
return []
LibraryModule = BuildDatabase[LibraryPath, Arch, Target, Toolchain]
# for those forced library instance (NULL library), add a fake library class
if LibraryClassName.startswith("NULL"):
LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
- elif LibraryModule.LibraryClass == None \
+ elif LibraryModule.LibraryClass is None \
or len(LibraryModule.LibraryClass) == 0 \
or (ModuleType != 'USER_DEFINED'
and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
@@ -139,7 +139,7 @@ def _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, To
else:
LibraryModule = LibraryInstance[LibraryClassName]
- if LibraryModule == None:
+ if LibraryModule is None:
continue
if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
@@ -239,12 +239,12 @@ def _ResolveLibraryReference(Module, Platform):
M = LibraryConsumerList.pop()
for LibraryName in M.Libraries:
Library = Platform.LibraryClasses[LibraryName, ':dummy:']
- if Library == None:
+ if Library is None:
for Key in Platform.LibraryClasses.data.keys():
if LibraryName.upper() == Key.upper():
Library = Platform.LibraryClasses[Key, ':dummy:']
break
- if Library == None:
+ if Library is None:
continue
if Library not in LibraryList:
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
index a3407d113e0f..2b888c0610c3 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -214,7 +214,7 @@ class WorkspaceDatabase(object):
else:
curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
- if rootPath == "" or rootPath == None:
+ if rootPath == "" or rootPath is None:
EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
determine whether database file is out of date!\n")
@@ -308,13 +308,13 @@ determine whether database file is out of date!\n")
Platform = self.BuildObject[PathClass(PlatformFile), 'COMMON']
except:
Platform = None
- if Platform != None:
+ if Platform is not None:
PlatformList.append(Platform)
return PlatformList
def _MapPlatform(self, Dscfile):
Platform = self.BuildObject[PathClass(Dscfile), 'COMMON']
- if Platform == None:
+ if Platform is None:
EdkLogger.error('build', PARSER_ERROR, "Failed to parser DSC file: %s" % Dscfile)
return Platform
diff --git a/BaseTools/Source/Python/build/BuildReport.py b/BaseTools/Source/Python/build/BuildReport.py
index d555dce9b3bc..966a2aa5abf1 100644
--- a/BaseTools/Source/Python/build/BuildReport.py
+++ b/BaseTools/Source/Python/build/BuildReport.py
@@ -722,7 +722,7 @@ def ReadMessage(From, To, ExitFlag):
# read one line a time
Line = From.readline()
# empty string means "end"
- if Line != None and Line != "":
+ if Line is not None and Line != "":
To(Line.rstrip())
else:
break
@@ -904,7 +904,7 @@ class PcdReport(object):
elif ReportSubType == 2:
PcdDict = self.UnusedPcds
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
FileWrite(File, gSectionStart)
if ReportSubType == 1:
FileWrite(File, "Conditional Directives used by the build system")
@@ -966,7 +966,7 @@ class PcdReport(object):
PcdValue = DecDefaultValue
if DscDefaultValue:
PcdValue = DscDefaultValue
- if ModulePcdSet != None:
+ if ModulePcdSet is not None:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type) not in ModulePcdSet:
continue
InfDefault, PcdValue = ModulePcdSet[Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type]
@@ -985,7 +985,7 @@ class PcdReport(object):
break
if First:
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
FileWrite(File, "")
FileWrite(File, Key)
First = False
@@ -993,35 +993,35 @@ class PcdReport(object):
if Pcd.DatumType in ('UINT8', 'UINT16', 'UINT32', 'UINT64'):
PcdValueNumber = int(PcdValue.strip(), 0)
- if DecDefaultValue == None:
+ if DecDefaultValue is None:
DecMatch = True
else:
DecDefaultValueNumber = int(DecDefaultValue.strip(), 0)
DecMatch = (DecDefaultValueNumber == PcdValueNumber)
- if InfDefaultValue == None:
+ if InfDefaultValue is None:
InfMatch = True
else:
InfDefaultValueNumber = int(InfDefaultValue.strip(), 0)
InfMatch = (InfDefaultValueNumber == PcdValueNumber)
- if DscDefaultValue == None:
+ if DscDefaultValue is None:
DscMatch = True
else:
DscDefaultValueNumber = int(DscDefaultValue.strip(), 0)
DscMatch = (DscDefaultValueNumber == PcdValueNumber)
else:
- if DecDefaultValue == None:
+ if DecDefaultValue is None:
DecMatch = True
else:
DecMatch = (DecDefaultValue.strip() == PcdValue.strip())
- if InfDefaultValue == None:
+ if InfDefaultValue is None:
InfMatch = True
else:
InfMatch = (InfDefaultValue.strip() == PcdValue.strip())
- if DscDefaultValue == None:
+ if DscDefaultValue is None:
DscMatch = True
else:
DscMatch = (DscDefaultValue.strip() == PcdValue.strip())
@@ -1087,7 +1087,7 @@ class PcdReport(object):
else:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*M')
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
if IsStructure:
continue
if not TypeName in ('PATCH', 'FLAG', 'FIXED'):
@@ -1111,7 +1111,7 @@ class PcdReport(object):
else:
FileWrite(File, ' *M %-*s = %s' % (self.MaxLen + 19, ModulePath, ModuleDefault.strip()))
- if ModulePcdSet == None:
+ if ModulePcdSet is None:
FileWrite(File, gSectionEnd)
else:
if not ReportSubType and ModulePcdSet:
@@ -1127,7 +1127,7 @@ class PcdReport(object):
return HasDscOverride
def PrintPcdDefault(self, File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue):
- if not DscMatch and DscDefaultValue != None:
+ if not DscMatch and DscDefaultValue is not None:
Value = DscDefaultValue.strip()
IsByteArray, ArrayList = ByteArrayForamt(Value)
if IsByteArray:
@@ -1136,7 +1136,7 @@ class PcdReport(object):
FileWrite(File, '%s' % (Array))
else:
FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DSC DEFAULT', Value))
- if not InfMatch and InfDefaultValue != None:
+ if not InfMatch and InfDefaultValue is not None:
Value = InfDefaultValue.strip()
IsByteArray, ArrayList = ByteArrayForamt(Value)
if IsByteArray:
@@ -1146,7 +1146,7 @@ class PcdReport(object):
else:
FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'INF DEFAULT', Value))
- if not DecMatch and DecDefaultValue != None:
+ if not DecMatch and DecDefaultValue is not None:
Value = DecDefaultValue.strip()
IsByteArray, ArrayList = ByteArrayForamt(Value)
if IsByteArray:
@@ -1971,7 +1971,7 @@ class PlatformReport(object):
self.PcdReport = PcdReport(Wa)
self.FdReportList = []
- if "FLASH" in ReportType and Wa.FdfProfile and MaList == None:
+ if "FLASH" in ReportType and Wa.FdfProfile and MaList is None:
for Fd in Wa.FdfProfile.FdDict:
self.FdReportList.append(FdReport(Wa.FdfProfile.FdDict[Fd], Wa))
@@ -1984,7 +1984,7 @@ class PlatformReport(object):
self.DepexParser = DepexParser(Wa)
self.ModuleReportList = []
- if MaList != None:
+ if MaList is not None:
self._IsModuleBuild = True
for Ma in MaList:
self.ModuleReportList.append(ModuleReport(Ma, ReportType))
@@ -1994,13 +1994,13 @@ class PlatformReport(object):
ModuleAutoGenList = []
for ModuleKey in Pa.Platform.Modules:
ModuleAutoGenList.append(Pa.Platform.Modules[ModuleKey].M)
- if GlobalData.gFdfParser != None:
+ if GlobalData.gFdfParser is not None:
if Pa.Arch in GlobalData.gFdfParser.Profile.InfDict:
INFList = GlobalData.gFdfParser.Profile.InfDict[Pa.Arch]
for InfName in INFList:
InfClass = PathClass(NormPath(InfName), Wa.WorkspaceDir, Pa.Arch)
Ma = ModuleAutoGen(Wa, InfClass, Pa.BuildTarget, Pa.ToolChain, Pa.Arch, Wa.MetaFile)
- if Ma == None:
+ if Ma is None:
continue
if Ma not in ModuleAutoGenList:
ModuleAutoGenList.append(Ma)
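
BuildReport.py and build.py share the same ReadMessage() loop for draining a child process's output, where readline() returning an empty string marks the end of the stream. A simplified, self-contained sketch of that loop (it omits the ExitFlag handling the real function also takes):

    import subprocess

    def show(text):
        print(text)

    def read_message(stream, sink):
        while True:
            line = stream.readline()
            # An empty string means end-of-stream; the "is not None"
            # guard preserves the original defensive check as well.
            if line is not None and line != "":
                sink(line.rstrip())
            else:
                break

    proc = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE,
                            universal_newlines=True)
    read_message(proc.stdout, show)
    proc.wait()
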
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 85612d90ced1..f211f8c64116 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -241,7 +241,7 @@ def ReadMessage(From, To, ExitFlag):
# read one line a time
Line = From.readline()
# empty string means "end"
- if Line != None and Line != "":
+ if Line is not None and Line != "":
To(Line.rstrip())
else:
break
@@ -299,9 +299,9 @@ def LaunchCommand(Command, WorkingDir):
except: # in case of aborting
# terminate the threads redirecting the program output
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
- if EndOfProcedure != None:
+ if EndOfProcedure is not None:
EndOfProcedure.set()
- if Proc == None:
+ if Proc is None:
if type(Command) != type(""):
Command = " ".join(Command)
EdkLogger.error("build", COMMAND_FAILURE, "Failed to start command", ExtraData="%s [%s]" % (Command, WorkingDir))
@@ -375,7 +375,7 @@ class BuildUnit:
# @param Other The other BuildUnit object compared to
#
def __eq__(self, Other):
- return Other != None and self.BuildObject == Other.BuildObject \
+ return Other is not None and self.BuildObject == Other.BuildObject \
and self.BuildObject.Arch == Other.BuildObject.Arch
## hash() method
@@ -633,7 +633,7 @@ class BuildTask:
self.BuildItem = BuildItem
self.DependencyList = []
- if Dependency == None:
+ if Dependency is None:
Dependency = BuildItem.Dependency
else:
Dependency.extend(BuildItem.Dependency)
@@ -795,7 +795,7 @@ class Build():
BinCacheSource = mws.join(self.WorkspaceDir, BinCacheSource)
GlobalData.gBinCacheSource = BinCacheSource
else:
- if GlobalData.gBinCacheSource != None:
+ if GlobalData.gBinCacheSource is not None:
EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-source.")
if GlobalData.gBinCacheDest:
@@ -804,7 +804,7 @@ class Build():
BinCacheDest = mws.join(self.WorkspaceDir, BinCacheDest)
GlobalData.gBinCacheDest = BinCacheDest
else:
- if GlobalData.gBinCacheDest != None:
+ if GlobalData.gBinCacheDest is not None:
EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-destination.")
if self.ConfDirectory:
@@ -907,7 +907,7 @@ class Build():
# if no tool chain given in command line, get it from target.txt
if not self.ToolChainList:
self.ToolChainList = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
- if self.ToolChainList == None or len(self.ToolChainList) == 0:
+ if self.ToolChainList is None or len(self.ToolChainList) == 0:
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.\n")
# check if the tool chains are defined or not
@@ -935,7 +935,7 @@ class Build():
ToolChainFamily.append(ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool])
self.ToolChainFamily = ToolChainFamily
- if self.ThreadNumber == None:
+ if self.ThreadNumber is None:
self.ThreadNumber = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
if self.ThreadNumber == '':
self.ThreadNumber = 0
@@ -1224,7 +1224,7 @@ class Build():
# for dependent modules/Libraries
#
def _BuildPa(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False, FfsCommand={}):
- if AutoGenObject == None:
+ if AutoGenObject is None:
return False
# skip file generation for cleanxxx targets, run and fds target
@@ -1252,7 +1252,7 @@ class Build():
EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))
BuildCommand = AutoGenObject.BuildCommand
- if BuildCommand == None or len(BuildCommand) == 0:
+ if BuildCommand is None or len(BuildCommand) == 0:
EdkLogger.error("build", OPTION_MISSING,
"No build command found for this module. "
"Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
@@ -1343,7 +1343,7 @@ class Build():
# for dependent modules/Libraries
#
def _Build(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False):
- if AutoGenObject == None:
+ if AutoGenObject is None:
return False
# skip file generation for cleanxxx targets, run and fds target
@@ -1372,7 +1372,7 @@ class Build():
EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))
BuildCommand = AutoGenObject.BuildCommand
- if BuildCommand == None or len(BuildCommand) == 0:
+ if BuildCommand is None or len(BuildCommand) == 0:
EdkLogger.error("build", OPTION_MISSING,
"No build command found for this module. "
"Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
@@ -1536,7 +1536,7 @@ class Build():
FvMap.readline()
for Line in FvMap:
MatchGuid = GuidPattern.match(Line)
- if MatchGuid != None:
+ if MatchGuid is not None:
#
# Replace GUID with module name
#
@@ -1548,7 +1548,7 @@ class Build():
# Add the debug image full path.
#
MatchGuid = GuidName.match(Line)
- if MatchGuid != None:
+ if MatchGuid is not None:
GuidString = MatchGuid.group().split("=")[1]
if GuidString.upper() in ModuleList:
MapBuffer.write('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))
@@ -1758,7 +1758,7 @@ class Build():
for Module in Pa.Platform.Modules:
# Get ModuleAutoGen object to generate C code file and makefile
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
- if Ma == None:
+ if Ma is None:
continue
self.BuildModules.append(Ma)
self._BuildPa(self.Target, Pa, FfsCommand=CmdListDict)
@@ -1778,7 +1778,7 @@ class Build():
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
- if Ma == None:
+ if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
@@ -1856,7 +1856,7 @@ class Build():
for Module in Pa.Platform.Modules:
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
- if Ma == None: continue
+ if Ma is None: continue
MaList.append(Ma)
if Ma.CanSkipbyHash():
self.HashSkipModules.append(Ma)
@@ -1936,7 +1936,7 @@ class Build():
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
- if Ma == None:
+ if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
@@ -2021,13 +2021,13 @@ class Build():
AutoGenStart = time.time()
GlobalData.gGlobalDefines['ARCH'] = Arch
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
- if Pa == None:
+ if Pa is None:
continue
ModuleList = []
for Inf in Pa.Platform.Modules:
ModuleList.append(Inf)
# Add the INF only list in FDF
- if GlobalData.gFdfParser != None:
+ if GlobalData.gFdfParser is not None:
for InfName in GlobalData.gFdfParser.Profile.InfList:
Inf = PathClass(NormPath(InfName), self.WorkspaceDir, Arch)
if Inf in Pa.Platform.Modules:
@@ -2037,7 +2037,7 @@ class Build():
# Get ModuleAutoGen object to generate C code file and makefile
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile)
- if Ma == None:
+ if Ma is None:
continue
if Ma.CanSkipbyHash():
self.HashSkipModules.append(Ma)
@@ -2122,7 +2122,7 @@ class Build():
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
- if Ma == None:
+ if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
@@ -2263,18 +2263,18 @@ class Build():
FilePath = os.path.join(os.path.dirname(GlobalData.gDatabasePath), "gFileTimeStampCache")
if Utils.gFileTimeStampCache == {} and os.path.isfile(FilePath):
Utils.gFileTimeStampCache = Utils.DataRestore(FilePath)
- if Utils.gFileTimeStampCache == None:
+ if Utils.gFileTimeStampCache is None:
Utils.gFileTimeStampCache = {}
FilePath = os.path.join(os.path.dirname(GlobalData.gDatabasePath), "gDependencyDatabase")
if Utils.gDependencyDatabase == {} and os.path.isfile(FilePath):
Utils.gDependencyDatabase = Utils.DataRestore(FilePath)
- if Utils.gDependencyDatabase == None:
+ if Utils.gDependencyDatabase is None:
Utils.gDependencyDatabase = {}
def ParseDefines(DefineList=[]):
DefineDict = {}
- if DefineList != None:
+ if DefineList is not None:
for Define in DefineList:
DefineTokenList = Define.split("=", 1)
if not GlobalData.gMacroNamePattern.match(DefineTokenList[0]):
@@ -2403,16 +2403,16 @@ def Main():
GlobalData.gCaseInsensitive = Option.CaseInsensitive
# Set log level
- if Option.verbose != None:
+ if Option.verbose is not None:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.quiet != None:
+ elif Option.quiet is not None:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.debug != None:
+ elif Option.debug is not None:
EdkLogger.SetLevel(Option.debug + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if Option.LogFile != None:
+ if Option.LogFile is not None:
EdkLogger.SetLogFile(Option.LogFile)
if Option.WarningAsError == True:
@@ -2472,13 +2472,13 @@ def Main():
if ErrorCode != 0:
EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
- if Option.PlatformFile != None:
+ if Option.PlatformFile is not None:
if os.path.isabs (Option.PlatformFile):
if os.path.normcase (os.path.normpath(Option.PlatformFile)).find (Workspace) == 0:
Option.PlatformFile = NormFile(os.path.normpath(Option.PlatformFile), Workspace)
Option.PlatformFile = PathClass(Option.PlatformFile, Workspace)
- if Option.FdfFile != None:
+ if Option.FdfFile is not None:
if os.path.isabs (Option.FdfFile):
if os.path.normcase (os.path.normpath(Option.FdfFile)).find (Workspace) == 0:
Option.FdfFile = NormFile(os.path.normpath(Option.FdfFile), Workspace)
@@ -2487,7 +2487,7 @@ def Main():
if ErrorCode != 0:
EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
- if Option.Flag != None and Option.Flag not in ['-c', '-s']:
+ if Option.Flag is not None and Option.Flag not in ['-c', '-s']:
EdkLogger.error("build", OPTION_VALUE_INVALID, "UNI flag must be one of -c or -s")
MyBuild = Build(Target, Workspace, Option)
@@ -2504,35 +2504,35 @@ def Main():
#
BuildError = False
except FatalError, X:
- if MyBuild != None:
+ if MyBuild is not None:
# for multi-thread build exits safely
MyBuild.Relinquish()
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
ReturnCode = X.args[0]
except Warning, X:
# error from Fdf parser
- if MyBuild != None:
+ if MyBuild is not None:
# for multi-thread build exits safely
MyBuild.Relinquish()
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
else:
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
- if Option != None and Option.debug != None:
+ if Option is not None and Option.debug is not None:
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
except:
- if MyBuild != None:
+ if MyBuild is not None:
# for multi-thread build exits safely
MyBuild.Relinquish()
# try to get the meta-file from the object causing exception
Tb = sys.exc_info()[-1]
MetaFile = GlobalData.gProcessingFile
- while Tb != None:
+ while Tb is not None:
if 'self' in Tb.tb_frame.f_locals and hasattr(Tb.tb_frame.f_locals['self'], 'MetaFile'):
MetaFile = Tb.tb_frame.f_locals['self'].MetaFile
Tb = Tb.tb_next
@@ -2566,7 +2566,7 @@ def Main():
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
else:
BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
- if MyBuild != None:
+ if MyBuild is not None:
if not BuildError:
MyBuild.BuildReport.GenerateReport(BuildDurationStr, LogBuildTime(MyBuild.AutoGenTime), LogBuildTime(MyBuild.MakeTime), LogBuildTime(MyBuild.GenFdsTime))
MyBuild.Db.Close()
--
2.16.2.windows.1
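
For reference (not part of the patch), a minimal sketch of the semantic
difference the change guards against: "==" dispatches to a class's
__eq__, which may be overridden (as BuildUnit.__eq__ above is), while
"is" performs an identity check against the None singleton and cannot be
fooled by a custom comparison. The class below is a hypothetical
stand-in, not from BaseTools, written in Python 2 style to match the tree:

    # Illustrative only; "FussyEqual" is hypothetical, not from BaseTools.
    class FussyEqual(object):
        def __eq__(self, other):
            # An over-permissive __eq__ makes "obj == None" return True.
            return True

    obj = FussyEqual()
    print(obj == None)   # True  -- __eq__ is consulted; misleading result
    print(obj is None)   # False -- identity check; always what was intended

Beyond correctness, "is None" avoids the method lookup entirely, which is
the conventional reason PEP 8 recommends it for singleton comparisons.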