diff --git a/colorama/ansitowin32.py b/colorama/ansitowin32.py
index e7eb8441..ecbb08e7 100644
--- a/colorama/ansitowin32.py
+++ b/colorama/ansitowin32.py
@@ -7,9 +7,7 @@
from .win32 import windll
-winterm = None
-if windll is not None:
- winterm = WinTerm()
+winterm = WinTerm() if windll is not None else None
def is_a_tty(stream):
@@ -182,10 +180,7 @@ def call_win32(self, command, params):
func = winterm.erase_data
func(params, on_stderr=self.on_stderr)
elif command == 'A':
- if params == () or params == None:
- num_rows = 1
- else:
- num_rows = params[0]
+ num_rows = 1 if params == () or params is None else params[0]
func = winterm.cursor_up
func(num_rows, on_stderr=self.on_stderr)
diff --git a/colorama/winterm.py b/colorama/winterm.py
index 27088115..09dfd255 100644
--- a/colorama/winterm.py
+++ b/colorama/winterm.py
@@ -61,9 +61,7 @@ def style(self, style=None, on_stderr=False):
def set_console(self, attrs=None, on_stderr=False):
if attrs is None:
attrs = self.get_attrs()
- handle = win32.STDOUT
- if on_stderr:
- handle = win32.STDERR
+ handle = win32.STDERR if on_stderr else win32.STDOUT
win32.SetConsoleTextAttribute(handle, attrs)
def get_position(self, handle):
@@ -79,17 +77,13 @@ def set_cursor_position(self, position=None, on_stderr=False):
#I'm not currently tracking the position, so there is no default.
#position = self.get_position()
return
- handle = win32.STDOUT
- if on_stderr:
- handle = win32.STDERR
+ handle = win32.STDERR if on_stderr else win32.STDOUT
win32.SetConsoleCursorPosition(handle, position)
def cursor_up(self, num_rows=0, on_stderr=False):
if num_rows == 0:
return
- handle = win32.STDOUT
- if on_stderr:
- handle = win32.STDERR
+ handle = win32.STDERR if on_stderr else win32.STDOUT
position = self.get_position(handle)
adjusted_position = (position.Y - num_rows, position.X)
self.set_cursor_position(adjusted_position, on_stderr)
@@ -104,9 +98,7 @@ def erase_data(self, mode=0, on_stderr=False):
# and to do so relative to the cursor position.
if mode[0] not in (2,):
return
- handle = win32.STDOUT
- if on_stderr:
- handle = win32.STDERR
+ handle = win32.STDERR if on_stderr else win32.STDOUT
# here's where we'll home the cursor
coord_screen = win32.COORD(0,0)
csbi = win32.GetConsoleScreenBufferInfo(handle)
diff --git a/fips b/fips
index bdb15be9..beb966a8 100755
--- a/fips
+++ b/fips
@@ -1,9 +1,10 @@
#!/usr/bin/env python
"""fips main entry"""
+
import os
import sys
from mod import fips
fips_path = os.path.dirname(os.path.abspath(__file__))
proj_path = fips_path
fips.run(fips_path, proj_path, sys.argv)
diff --git a/generators/genutil.py b/generators/genutil.py
index 2a57ee52..49ad944f 100644
--- a/generators/genutil.py
+++ b/generators/genutil.py
@@ -8,11 +8,11 @@
Env = {}
#-------------------------------------------------------------------------------
-def error(msg) :
+def error(msg):
'''
Just print a simple error message and return with error code 10.
'''
- print("ERROR: {}".format(msg))
+ print(f"ERROR: {msg}")
sys.exit(10)
#-------------------------------------------------------------------------------
@@ -23,37 +23,37 @@ def setErrorLocation(filePath, lineNumber) :
LineNumber = lineNumber
#-------------------------------------------------------------------------------
-def fmtError(msg, terminate=True) :
+def fmtError(msg, terminate=True):
'''
Print an error message formatted so that IDEs can parse them,
and return with error code 10.
'''
- if platform.system() == 'Windows' :
- print('{}({}): error: {}'.format(FilePath, LineNumber + 1, msg))
- else :
- print('{}:{}:0: error: {}\n'.format(FilePath, LineNumber + 1, msg))
+ if platform.system() == 'Windows':
+ print(f'{FilePath}({LineNumber + 1}): error: {msg}')
+ else:
+ print(f'{FilePath}:{LineNumber + 1}:0: error: {msg}\n')
if terminate:
sys.exit(10)
#-------------------------------------------------------------------------------
-def fmtWarning(msg) :
+def fmtWarning(msg):
'''
Print an warning message formatted so that IDEs can parse them.
'''
- if platform.system() == 'Windows' :
- print('{}({}): warning: {}'.format(FilePath, LineNumber + 1, msg))
- else :
- print('{}:{}:0: warning: {}\n'.format(FilePath, LineNumber + 1, msg))
+ if platform.system() == 'Windows':
+ print(f'{FilePath}({LineNumber + 1}): warning: {msg}')
+ else:
+ print(f'{FilePath}:{LineNumber + 1}:0: warning: {msg}\n')
#-------------------------------------------------------------------------------
-def fileVersionDirty(filePath, version) :
+def fileVersionDirty(filePath, version):
'''
Reads the first 4 lines of a file, checks whether there's an
$$version:X statemenet in it, returns False if the version
number in the file is equal to the arg version.
'''
f = open(filePath, 'r')
- for i in range(0,4) :
+    for _ in range(4):
line = f.readline()
startIndex = line.find('#version:')
if startIndex != -1 :
diff --git a/mod/android.py b/mod/android.py
index 63ed1bcc..1369394e 100644
--- a/mod/android.py
+++ b/mod/android.py
@@ -27,8 +27,8 @@
}
#-------------------------------------------------------------------------------
-def get_sdk_dir(fips_dir) :
- return util.get_workspace_dir(fips_dir) + '/fips-sdks/android'
+def get_sdk_dir(fips_dir):
+ return f'{util.get_workspace_dir(fips_dir)}/fips-sdks/android'
#-------------------------------------------------------------------------------
def check_exists(fips_dir) :
@@ -37,7 +37,7 @@ def check_exists(fips_dir) :
#-------------------------------------------------------------------------------
def get_adb_path(fips_dir):
- return get_sdk_dir(fips_dir) + '/platform-tools/adb'
+ return f'{get_sdk_dir(fips_dir)}/platform-tools/adb'
#-------------------------------------------------------------------------------
def get_tools_url() :
@@ -45,7 +45,7 @@ def get_tools_url() :
#-------------------------------------------------------------------------------
def get_tools_archive_path(fips_dir):
- return get_sdk_dir(fips_dir) + '/' + tools_archives[util.get_host_platform()]
+ return f'{get_sdk_dir(fips_dir)}/{tools_archives[util.get_host_platform()]}'
#-------------------------------------------------------------------------------
# convert a cmake target into a valid Android package name,
@@ -58,10 +58,10 @@ def target_to_package_name(target):
#-------------------------------------------------------------------------------
def install_package(fips_dir, pkg):
- log.colored(log.BLUE, '>>> install Android SDK package: {}'.format(pkg))
- sdkmgr_dir = get_sdk_dir(fips_dir) + '/tools/bin/'
- sdkmgr_path = sdkmgr_dir + 'sdkmanager'
- cmd = '{} --verbose {}'.format(sdkmgr_path, pkg)
+ log.colored(log.BLUE, f'>>> install Android SDK package: {pkg}')
+ sdkmgr_dir = f'{get_sdk_dir(fips_dir)}/tools/bin/'
+ sdkmgr_path = f'{sdkmgr_dir}sdkmanager'
+ cmd = f'{sdkmgr_path} --verbose {pkg}'
subprocess.call(cmd, cwd=sdkmgr_dir, shell=True)
#-------------------------------------------------------------------------------
@@ -70,25 +70,23 @@ def ensure_sdk_dirs(fips_dir) :
os.makedirs(get_sdk_dir(fips_dir))
#-------------------------------------------------------------------------------
-def uncompress(fips_dir, path) :
+def uncompress(fips_dir, path):
# the python zip module doesn't preserve the executable flags, so just
# call unzip on Linux and OSX
if util.get_host_platform() in ['osx', 'linux']:
- subprocess.call('unzip {}'.format(path), cwd=get_sdk_dir(fips_dir), shell=True)
+ subprocess.call(f'unzip {path}', cwd=get_sdk_dir(fips_dir), shell=True)
else:
with zipfile.ZipFile(path, 'r') as archive:
archive.extractall(get_sdk_dir(fips_dir))
#-------------------------------------------------------------------------------
-def compute_sha256(path, converter=lambda x: x, chunk_size=65536) :
+def compute_sha256(path, converter=lambda x: x, chunk_size=65536):
if not os.path.isfile(path) :
return None
result = hashlib.sha256()
- with open(path, 'rb') as file :
- chunk = file.read(chunk_size)
- while chunk :
+ with open(path, 'rb') as file:
+ while chunk := file.read(chunk_size):
result.update(converter(chunk))
- chunk = file.read(chunk_size)
return result.hexdigest()
#-------------------------------------------------------------------------------
@@ -98,7 +96,7 @@ def strip_whitespace(bin_str) :
return bin_str
#-------------------------------------------------------------------------------
-def setup(fips_dir, proj_dir) :
+def setup(fips_dir, proj_dir):
"""setup the Android SDK and NDK"""
log.colored(log.YELLOW, '=== setup Android SDK/NDK :')
@@ -111,9 +109,9 @@ def setup(fips_dir, proj_dir) :
# download the command line tools archive
tools_archive_path = get_tools_archive_path(fips_dir)
tools_url = get_tools_url()
- log.info("downloading '{}'...".format(tools_url))
+ log.info(f"downloading '{tools_url}'...")
urlretrieve(tools_url, tools_archive_path, util.url_download_hook)
- log.info("\nunpacking '{}'...".format(tools_archive_path))
+ log.info(f"\nunpacking '{tools_archive_path}'...")
uncompress(fips_dir, tools_archive_path)
# install the required SDK components through sdkmanager
@@ -123,8 +121,8 @@ def setup(fips_dir, proj_dir) :
install_package(fips_dir, 'ndk-bundle')
# check for potentially breaking changes in build setup
- fips_cmake = fips_dir + '/cmake-toolchains/android.toolchain.orig'
- ndk_cmake = get_sdk_dir(fips_dir) + '/ndk-bundle/build/cmake/android.toolchain.cmake'
+ fips_cmake = f'{fips_dir}/cmake-toolchains/android.toolchain.orig'
+ ndk_cmake = f'{get_sdk_dir(fips_dir)}/ndk-bundle/build/cmake/android.toolchain.cmake'
if compute_sha256(ndk_cmake, strip_whitespace) != compute_sha256(fips_cmake, strip_whitespace) :
log.warn('android.toolchain.cmake in fips might be outdated...')
diff --git a/mod/config.py b/mod/config.py
index 8568faba..7d108441 100644
--- a/mod/config.py
+++ b/mod/config.py
@@ -49,7 +49,7 @@ def get_default_config() :
return default_config[util.get_host_platform()]
#-------------------------------------------------------------------------------
-def get_toolchain(fips_dir, proj_dir, cfg) :
+def get_toolchain(fips_dir, proj_dir, cfg):
"""get the toolchain path location for a config, this first checks
for a 'cmake-toolchain' attribute, and if this does not exist, builds
a xxx.toolchain.cmake file from the platform name (only for cross-
@@ -73,53 +73,50 @@ def get_toolchain(fips_dir, proj_dir, cfg) :
# build toolchain file name
toolchain = None
- if 'cmake-toolchain' in cfg :
+ if 'cmake-toolchain' in cfg:
toolchain = cfg['cmake-toolchain']
- else :
- toolchain = '{}.toolchain.cmake'.format(cfg['platform'])
-
- # look for toolchain file in current project directory
- toolchain_dir = util.get_toolchains_dir(proj_dir)
- toolchain_path = None
- if toolchain_dir:
- toolchain_path = toolchain_dir + '/' + toolchain
- if toolchain_path and os.path.isfile(toolchain_path) :
+ else:
+ toolchain = f"{cfg['platform']}.toolchain.cmake"
+
+ if toolchain_dir := util.get_toolchains_dir(proj_dir):
+ toolchain_path = f'{toolchain_dir}/{toolchain}'
+ else:
+ toolchain_path = None
+ if toolchain_path and os.path.isfile(toolchain_path):
return toolchain_path
- else :
- # look for toolchain in all imported directories
- _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir)
- for imported_proj_name in imported_projs :
- imported_proj_dir = imported_projs[imported_proj_name]['proj_dir']
- toolchain_dir = util.get_toolchains_dir(imported_proj_dir)
- toolchain_path = None
- if toolchain_dir:
- toolchain_path = toolchain_dir + '/' + toolchain
- if toolchain_path and os.path.isfile(toolchain_path):
- return toolchain_path
- else :
+ # look for toolchain in all imported directories
+ _, imported_projs = dep.get_all_imports_exports(fips_dir, proj_dir)
+ for imported_proj_name in imported_projs:
+ imported_proj_dir = imported_projs[imported_proj_name]['proj_dir']
+ toolchain_dir = util.get_toolchains_dir(imported_proj_dir)
+ toolchain_path = None
+ if toolchain_dir:
+ toolchain_path = f'{toolchain_dir}/{toolchain}'
+ if toolchain_path and os.path.isfile(toolchain_path):
+ return toolchain_path
+ else:
# toolchain is not in current project or imported projects,
# try the fips directory
- toolchain_path = '{}/cmake-toolchains/{}'.format(fips_dir, toolchain)
- if os.path.isfile(toolchain_path) :
- return toolchain_path
+ toolchain_path = f'{fips_dir}/cmake-toolchains/{toolchain}'
+        if os.path.isfile(toolchain_path):
+ return toolchain_path
# fallthrough: no toolchain file found
return None
#-------------------------------------------------------------------------------
-def exists(pattern, proj_dirs) :
+def exists(pattern, proj_dirs):
"""test if at least one matching config exists
:param pattern: config name pattern (e.g. 'linux-make-*')
:param proj_dir: array of toplevel dirs to search (must have /configs subdir)
:returns: True if at least one matching config exists
"""
- for curDir in proj_dirs :
- if len(glob.glob('{}/configs/{}.yml'.format(curDir, pattern))) > 0 :
- return True
- return False
+ return any(
+ glob.glob(f'{curDir}/configs/{pattern}.yml') for curDir in proj_dirs
+ )
#-------------------------------------------------------------------------------
-def get_config_dirs(fips_dir, proj_dir) :
+def get_config_dirs(fips_dir, proj_dir):
"""return list of config directories, including all imports
:param fips_dir: absolute fips directory
@@ -127,21 +124,20 @@ def get_config_dirs(fips_dir, proj_dir) :
:returns: list of all directories with config files
"""
dirs = []
- if fips_dir != proj_dir :
+ if fips_dir != proj_dir:
success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
- if success :
- for dep_proj_name in result :
+ if success:
+ for dep_proj_name in result:
dep_proj_dir = result[dep_proj_name]['proj_dir']
- dep_configs_dir = util.get_configs_dir(dep_proj_dir)
- if dep_configs_dir:
+ if dep_configs_dir := util.get_configs_dir(dep_proj_dir):
dirs.append(dep_configs_dir)
- else :
+ else:
log.warn("missing import directories, please run 'fips fetch'")
- dirs.append(fips_dir + '/configs')
+ dirs.append(f'{fips_dir}/configs')
return dirs
#-------------------------------------------------------------------------------
-def list(fips_dir, proj_dir, pattern) :
+def list(fips_dir, proj_dir, pattern):
"""return { dir : [cfgname, ...] } in fips_dir/configs and
proj_dir/fips-files/configs
@@ -152,9 +148,9 @@ def list(fips_dir, proj_dir, pattern) :
"""
dirs = get_config_dirs(fips_dir, proj_dir)
res = OrderedDict()
- for curDir in dirs :
+ for curDir in dirs:
res[curDir] = []
- paths = glob.glob('{}/*.yml'.format(curDir))
+ paths = glob.glob(f'{curDir}/*.yml')
for path in paths :
fname = os.path.split(path)[1]
fname = os.path.splitext(fname)[0]
@@ -163,7 +159,7 @@ def list(fips_dir, proj_dir, pattern) :
return res
#-------------------------------------------------------------------------------
-def load(fips_dir, proj_dir, pattern) :
+def load(fips_dir, proj_dir, pattern):
"""load one or more matching configs from fips and current project dir
:param fips_dir: absolute fips directory
@@ -173,8 +169,8 @@ def load(fips_dir, proj_dir, pattern) :
"""
dirs = get_config_dirs(fips_dir, proj_dir)
configs = []
- for curDir in dirs :
- paths = glob.glob('{}/{}.yml'.format(curDir, pattern))
+ for curDir in dirs:
+ paths = glob.glob(f'{curDir}/{pattern}.yml')
for path in paths :
try :
with open(path, 'r') as f :
@@ -233,7 +229,7 @@ def check_sdk(fips_dir, platform_name) :
return True
#-------------------------------------------------------------------------------
-def check_config_valid(fips_dir, proj_dir, cfg, print_errors=False) :
+def check_config_valid(fips_dir, proj_dir, cfg, print_errors=False):
"""check if provided config is valid, and print errors if not
:param cfg: a loaded config object
@@ -246,31 +242,35 @@ def check_config_valid(fips_dir, proj_dir, cfg, print_errors=False) :
# (NOTE: name and folder should always be present since they are appended
# during loading)
required_fields = ['name', 'folder', 'platform', 'generator', 'build_tool', 'build_type']
- for field in required_fields :
- if field not in cfg :
- messages.append("missing field '{}' in '{}'".format(field, cfg['path']))
+ for field in required_fields:
+ if field not in cfg:
+ messages.append(f"missing field '{field}' in '{cfg['path']}'")
valid = False
-
+
# check if the target platform SDK is installed
- if not check_sdk(fips_dir, cfg['platform']) :
- messages.append("platform sdk for '{}' not installed (see './fips help setup')".format(cfg['platform']))
+ if not check_sdk(fips_dir, cfg['platform']):
+ messages.append(
+ f"platform sdk for '{cfg['platform']}' not installed (see './fips help setup')"
+ )
valid = False
# check if build tool is valid
- if not valid_build_tool(cfg['build_tool']) :
- messages.append("invalid build_tool name '{}' in '{}'".format(cfg['build_tool'], cfg['path']))
+ if not valid_build_tool(cfg['build_tool']):
+ messages.append(
+ f"invalid build_tool name '{cfg['build_tool']}' in '{cfg['path']}'"
+ )
valid = False
# check if the build tool can be found
- if not check_build_tool(fips_dir, cfg['build_tool']) :
- messages.append("build tool '{}' not found".format(cfg['build_tool']))
+ if not check_build_tool(fips_dir, cfg['build_tool']):
+ messages.append(f"build tool '{cfg['build_tool']}' not found")
valid = False
# check if the toolchain file can be found (if this is a crosscompiling toolchain)
- if cfg['platform'] not in native_platforms :
+ if cfg['platform'] not in native_platforms:
toolchain_path = get_toolchain(fips_dir, proj_dir, cfg)
- if not toolchain_path :
- messages.append("toolchain file not found for config '{}'!".format(cfg['name']))
+ if not toolchain_path:
+ messages.append(f"toolchain file not found for config '{cfg['name']}'!")
valid = False
if print_errors :
diff --git a/mod/dep.py b/mod/dep.py
index e239f3a9..7069285b 100644
--- a/mod/dep.py
+++ b/mod/dep.py
@@ -8,7 +8,7 @@
from mod.tools import git
#-------------------------------------------------------------------------------
-def get_imports(fips_dir, proj_dir) :
+def get_imports(fips_dir, proj_dir):
"""get the imports from the fips.yml file in proj_dir
:param proj_dir: the project directory
@@ -16,44 +16,45 @@ def get_imports(fips_dir, proj_dir) :
"""
proj_name = util.get_project_name_from_dir(proj_dir)
imports = {}
- if util.is_valid_project_dir(proj_dir) :
+ if util.is_valid_project_dir(proj_dir):
dic = util.load_fips_yml(proj_dir)
if 'imports' in dic :
imports = dic['imports']
# warn if this is an old-style list instead of new style dict
- if imports :
- if type(imports) is list :
- log.warn("imports in '{}/fips.yml' uses obsolete array format".format(proj_dir))
-
+ if imports:
+ if type(imports) is list:
+ log.warn(f"imports in '{proj_dir}/fips.yml' uses obsolete array format")
+
# convert old style to new dict format
# FIXME: should be removed after a while
new_imports = {}
- for dep in imports :
+ for dep in imports:
dep_url = registry.get_url(fips_dir, dep)
- if not util.is_git_url(dep_url) :
- log.error("'{}' cannot be resolved into a git url (in project '{}')".format(dep_url, proj_name))
+ if not util.is_git_url(dep_url):
+ log.error(
+ f"'{dep_url}' cannot be resolved into a git url (in project '{proj_name}')"
+ )
dep_proj_name = util.get_project_name_from_url(dep_url)
- new_imports[dep_proj_name] = {}
- new_imports[dep_proj_name]['git'] = util.get_giturl_from_url(dep_url)
+ new_imports[dep_proj_name] = {'git': util.get_giturl_from_url(dep_url)}
new_imports[dep_proj_name]['branch'] = util.get_gitbranch_from_url(dep_url)
imports = new_imports
- elif type(imports) is dict :
- for dep in imports :
- if not 'branch' in imports[dep] :
+ elif type(imports) is dict:
+ for dep in imports:
+ if 'branch' not in imports[dep]:
imports[dep]['branch'] = 'master'
- if not 'cond' in imports[dep] :
+ if 'cond' not in imports[dep]:
imports[dep]['cond'] = None
- if not 'git' in imports[dep] :
- log.error("no git URL in import '{}' in '{}/fips.yml'!\n".format(dep, proj_dir))
- if not 'group' in imports[dep] :
+ if 'git' not in imports[dep]:
+ log.error(f"no git URL in import '{dep}' in '{proj_dir}/fips.yml'!\n")
+ if 'group' not in imports[dep]:
imports[dep]['group'] = None
- else :
- log.error("imports in '{}/fips.yml' must be a dictionary!".format(proj_dir))
+ else:
+ log.error(f"imports in '{proj_dir}/fips.yml' must be a dictionary!")
return imports
#-------------------------------------------------------------------------------
-def get_exports(proj_dir) :
+def get_exports(proj_dir):
"""get the exports from the fips.yml file in proj_dir
:param proj_dir: the project directory
@@ -64,18 +65,18 @@ def get_exports(proj_dir) :
dic = util.load_fips_yml(proj_dir)
if 'exports' in dic :
exports = dic['exports']
- if not 'header-dirs' in exports :
+ if 'header-dirs' not in exports:
exports['header-dirs'] = []
- if not 'lib-dirs' in exports :
+ if 'lib-dirs' not in exports:
exports['lib-dirs'] = []
- if not 'defines' in exports :
+ if 'defines' not in exports:
exports['defines'] = {}
- if not 'modules' in exports :
+ if 'modules' not in exports:
exports['modules'] = {}
return exports
#-------------------------------------------------------------------------------
-def get_policy(proj_dir, policy) :
+def get_policy(proj_dir, policy):
"""checks whether a policy is defined in the projects fips.yml
and returns its bool value, or the default if not defined.
@@ -91,13 +92,11 @@ def get_policy(proj_dir, policy) :
if 'policies' in dic and type(dic['policies']) is dict:
if policy in dic['policies'] :
return dic['policies'][policy]
- # not found, return default
- if policy in def_policies :
+ if policy in def_policies:
return def_policies[policy]
- else :
# unknown policy, return None
- log.error("unknown policy name: '{}'".format(policy))
- return None
+ log.error(f"unknown policy name: '{policy}'")
+ return None
#-------------------------------------------------------------------------------
def _rec_get_all_imports_exports(fips_dir, proj_dir, result) :
@@ -168,7 +167,7 @@ def get_all_imports_exports(fips_dir, proj_dir) :
return _rec_get_all_imports_exports(fips_dir, proj_dir, result)
#-------------------------------------------------------------------------------
-def _rec_fetch_imports(fips_dir, proj_dir, handled) :
+def _rec_fetch_imports(fips_dir, proj_dir, handled):
"""internal recursive function to fetch project imports,
keeps an array of already handled dirs to break cyclic dependencies
@@ -178,7 +177,7 @@ def _rec_fetch_imports(fips_dir, proj_dir, handled) :
"""
ws_dir = util.get_workspace_dir(fips_dir)
proj_name = util.get_project_name_from_dir(proj_dir)
- if proj_name not in handled :
+ if proj_name not in handled:
handled.append(proj_name)
imports = get_imports(fips_dir, proj_dir)
@@ -186,9 +185,9 @@ def _rec_fetch_imports(fips_dir, proj_dir, handled) :
dep_proj_name = dep
if dep not in handled:
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
- log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
+ log.colored(log.YELLOW, f"=== dependency: '{dep_proj_name}':")
dep_ok = False
- if not os.path.isdir(dep_proj_dir) :
+ if not os.path.isdir(dep_proj_dir):
# directory did not exist, do a fresh git clone
dep = imports[dep_proj_name]
git_commit = None if 'rev' not in dep else dep['rev']
@@ -200,17 +199,17 @@ def _rec_fetch_imports(fips_dir, proj_dir, handled) :
git_depth = git.clone_depth if not git_commit and 'depth' not in dep else dep['depth']
git_url = dep['git']
git_branch = dep['branch']
- if git.clone(git_url, git_branch, git_depth, dep_proj_name, ws_dir) :
- if git_commit :
- log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
+ if git.clone(git_url, git_branch, git_depth, dep_proj_name, ws_dir):
+ if git_commit:
+ log.colored(log.YELLOW, f"=== revision: '{git_commit}':")
dep_ok = git.checkout(dep_proj_dir, git_commit)
- else :
+ else:
dep_ok = True
- else :
- log.error('failed to git clone {} into {}'.format(git_url, dep_proj_dir))
- else :
+ else:
+ log.error(f'failed to git clone {git_url} into {dep_proj_dir}')
+ else:
# directory already exists
- log.info("dir '{}' exists".format(dep_proj_dir))
+ log.info(f"dir '{dep_proj_dir}' exists")
dep_ok = True
# recuse
@@ -230,7 +229,7 @@ def fetch_imports(fips_dir, proj_dir) :
_rec_fetch_imports(fips_dir, proj_dir, [])
#-------------------------------------------------------------------------------
-def gather_imports(fips_dir, proj_dir) :
+def gather_imports(fips_dir, proj_dir):
"""resolve imports of proj_dir and returns a big dictionary
with all imported data, which can then be written with write_imports()
@@ -244,18 +243,18 @@ def gather_imports(fips_dir, proj_dir) :
ws_dir = util.get_workspace_dir(fips_dir)
success, deps = get_all_imports_exports(fips_dir, proj_dir)
- unique_defines = {}
- unique_modules = {}
-
- if success :
+ if success:
+ unique_defines = {}
+ unique_modules = {}
+
# for each project:
- for proj_name in deps :
+ for proj_name in deps:
imports = deps[proj_name]['imports']
exports = deps[proj_name]['exports']
# for each imported project:
- for imp_proj_name in sorted(imports) :
+ for imp_proj_name in sorted(imports):
imported[imp_proj_name] = {}
imported[imp_proj_name]['modules'] = OrderedDict()
@@ -266,53 +265,59 @@ def gather_imports(fips_dir, proj_dir) :
imported[imp_proj_name]['group'] = imports[imp_proj_name]['group']
# add header search paths
- for imp_hdr in deps[imp_proj_name]['exports']['header-dirs'] :
- hdr_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_hdr)
+ for imp_hdr in deps[imp_proj_name]['exports']['header-dirs']:
+ hdr_path = f'{ws_dir}/{imp_proj_name}/{imp_hdr}'
hdr_path = os.path.normpath(hdr_path).replace('\\', '/')
- if not os.path.isdir(hdr_path) :
- log.warn("header search path '{}' not found in project '{}'".format(hdr_path, imp_proj_name))
+ if not os.path.isdir(hdr_path):
+ log.warn(
+ f"header search path '{hdr_path}' not found in project '{imp_proj_name}'"
+ )
imported[imp_proj_name]['hdrdirs'].append(hdr_path)
# add lib search paths
- for imp_lib in deps[imp_proj_name]['exports']['lib-dirs'] :
- lib_path = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_lib)
+ for imp_lib in deps[imp_proj_name]['exports']['lib-dirs']:
+ lib_path = f'{ws_dir}/{imp_proj_name}/{imp_lib}'
lib_path = os.path.normpath(lib_path).replace('\\', '/')
- if not os.path.isdir(lib_path) :
- log.warn("lib search path '{}' not found in project '{}'".format(lib_path, imp_proj_name))
+ if not os.path.isdir(lib_path):
+ log.warn(
+ f"lib search path '{lib_path}' not found in project '{imp_proj_name}'"
+ )
imported[imp_proj_name]['libdirs'].append(lib_path)
# add defines
- for imp_def in deps[imp_proj_name]['exports']['defines'] :
+ for imp_def in deps[imp_proj_name]['exports']['defines']:
# hmm, no check whether this define collides with an earlier define...
value = deps[imp_proj_name]['exports']['defines'][imp_def]
imported[imp_proj_name]['defines'][imp_def] = value
- if imp_def in unique_defines :
- if unique_defines[imp_def] != value :
- log.warn("C define collision: '{}={}' in '{}' collides with '{}={}' in earlier import".format(
- imp_def, value, imp_proj_name, imp_def, unique_defines[define]))
+ if imp_def in unique_defines:
+ if unique_defines[imp_def] != value:
+ log.warn(
+                                f"C define collision: '{imp_def}={value}' in '{imp_proj_name}' collides with '{imp_def}={unique_defines[imp_def]}' in earlier import"
+ )
unique_defines[imp_def] = value
# for each imported module:
- for imp_mod in deps[imp_proj_name]['exports']['modules'] :
+ for imp_mod in deps[imp_proj_name]['exports']['modules']:
imp_mod_src = deps[imp_proj_name]['exports']['modules'][imp_mod]
# import module source directory (where module's CMakeLists.txt is)
- src_dir = '{}/{}/{}'.format(ws_dir, imp_proj_name, imp_mod_src)
+ src_dir = f'{ws_dir}/{imp_proj_name}/{imp_mod_src}'
# cmake build subdirectory
- build_dir = '{}_{}'.format(imp_proj_name, imp_mod)
+ build_dir = f'{imp_proj_name}_{imp_mod}'
imported[imp_proj_name]['modules'][src_dir] = build_dir
- if imp_mod in unique_modules :
- if unique_modules[imp_mod] != src_dir :
- log.warn("Import module '{}=>{}' in '{}' collides with '{}=>{}' in earlier import".format(
- imp_mod, src_dir, imp_proj_name, imp_mod, unique_modules[imp_mod]))
+ if imp_mod in unique_modules:
+ if unique_modules[imp_mod] != src_dir:
+ log.warn(
+ f"Import module '{imp_mod}=>{src_dir}' in '{imp_proj_name}' collides with '{imp_mod}=>{unique_modules[imp_mod]}' in earlier import"
+ )
unique_modules[imp_mod] = src_dir
return imported
- else :
+ else:
log.warn("imports are incomplete, please run 'fips fetch'")
return None
#-------------------------------------------------------------------------------
-def write_imports(fips_dir, proj_dir, cfg_name, imported) :
+def write_imports(fips_dir, proj_dir, cfg_name, imported):
"""write the big imports map created with 'gather_imports'
to a .fips-imports.cmake file in the current project
@@ -321,7 +326,7 @@ def write_imports(fips_dir, proj_dir, cfg_name, imported) :
:params imported: the imports dictionary created with 'gather_imports'
"""
- if imported :
+ if imported:
unique_hdrdirs = []
unique_libdirs = []
unique_defines = {}
@@ -331,64 +336,64 @@ def write_imports(fips_dir, proj_dir, cfg_name, imported) :
# this will replace the old file, but only if the
# content is different, this will prevent an unnecessary
# cmake run if the imports haven't changed
- import_filename = proj_dir + '/.fips-imports.cmake'
- import_tmp_filename = import_filename + '.tmp'
- with open(import_tmp_filename, 'w') as f :
+ import_filename = f'{proj_dir}/.fips-imports.cmake'
+ import_tmp_filename = f'{import_filename}.tmp'
+ with open(import_tmp_filename, 'w') as f:
f.write("#\n# generated by 'fips gen', don't edit, don't add to version control!\n#\n")
-
- for imp_proj_name in imported :
+
+ for imp_proj_name in imported:
imp_proj_dir = util.get_project_dir(fips_dir, imp_proj_name)
-
+
if imported[imp_proj_name]['cond']:
- f.write('if ({})\n'.format(imported[imp_proj_name]['cond']))
+ f.write(f"if ({imported[imp_proj_name]['cond']})\n")
# add include and lib search paths
- if imp_proj_dir != proj_dir :
- f.write('if (EXISTS "{}/fips-include.cmake")\n'.format(imp_proj_dir))
- f.write(' include("{}/fips-include.cmake")\n'.format(imp_proj_dir))
- f.write('elseif (EXISTS "{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
- f.write(' include ("{}/fips-files/include.cmake")\n'.format(imp_proj_dir))
+ if imp_proj_dir != proj_dir:
+ f.write(f'if (EXISTS "{imp_proj_dir}/fips-include.cmake")\n')
+ f.write(f' include("{imp_proj_dir}/fips-include.cmake")\n')
+ f.write(f'elseif (EXISTS "{imp_proj_dir}/fips-files/include.cmake")\n')
+ f.write(f' include ("{imp_proj_dir}/fips-files/include.cmake")\n')
f.write('endif()\n')
f.write('if (EXISTS "{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
f.write(' link_directories("{}/lib/${{FIPS_PLATFORM_NAME}}")\n'.format(imp_proj_dir))
f.write('endif()\n')
# add header search paths
- for hdrdir in imported[imp_proj_name]['hdrdirs'] :
- if hdrdir not in unique_hdrdirs :
- f.write('include_directories("{}")\n'.format(hdrdir))
+ for hdrdir in imported[imp_proj_name]['hdrdirs']:
+ if hdrdir not in unique_hdrdirs:
+ f.write(f'include_directories("{hdrdir}")\n')
unique_hdrdirs.append(hdrdir)
# add lib search paths
- for libdir in imported[imp_proj_name]['libdirs'] :
- if libdir not in unique_libdirs :
- f.write('link_directories("{}")\n'.format(libdir))
+ for libdir in imported[imp_proj_name]['libdirs']:
+ if libdir not in unique_libdirs:
+ f.write(f'link_directories("{libdir}")\n')
unique_libdirs.append(libdir)
# add defines
- for define in imported[imp_proj_name]['defines'] :
- value = imported[imp_proj_name]['defines'][define]
- if define not in unique_defines :
+ for define in imported[imp_proj_name]['defines']:
+ if define not in unique_defines:
+ value = imported[imp_proj_name]['defines'][define]
unique_defines[define] = value
- if type(value) is str :
- f.write('add_definitions(-D{}="{}")\n'.format(define, value))
- else :
- f.write('add_definitions(-D{}={})\n'.format(define, value))
+ if type(value) is str:
+ f.write(f'add_definitions(-D{define}="{value}")\n')
+ else:
+ f.write(f'add_definitions(-D{define}={value})\n')
# add import modules
- if len(imported[imp_proj_name]['modules']) > 0 :
+ if len(imported[imp_proj_name]['modules']) > 0:
import_functions = []
# first add all module import functions
- for module in imported[imp_proj_name]['modules'] :
+ for module in imported[imp_proj_name]['modules']:
module_path = imported[imp_proj_name]['modules'][module]
- if module not in unique_modules :
+ if module not in unique_modules:
unique_modules[module] = module_path
- import_func = 'fips_import_{}'.format(module_path).replace('-','_')
+ import_func = f'fips_import_{module_path}'.replace('-', '_')
import_functions.append(import_func)
- f.write('macro({})\n'.format(import_func))
+ f.write(f'macro({import_func})\n')
f.write(' set(FIPS_IMPORT 1)\n')
- f.write(' add_subdirectory("{}" "{}")\n'.format(module, module_path))
+ f.write(f' add_subdirectory("{module}" "{module_path}")\n')
f.write(' set(FIPS_IMPORT)\n')
f.write('endmacro()\n')
@@ -396,15 +401,15 @@ def write_imports(fips_dir, proj_dir, cfg_name, imported) :
f.write('if (FIPS_AUTO_IMPORT)\n')
group = "Imports"
if imported[imp_proj_name]['group'] :
- group += "/" + imported[imp_proj_name]['group']
- if len(imported[imp_proj_name]['modules']) > 3 :
- group += "/" + imp_proj_name
- f.write(' fips_ide_group("{}")\n'.format(group))
- for import_func in import_functions :
- f.write(' {}()\n'.format(import_func))
+ group += "/" + imported[imp_proj_name]['group']
+ if len(imported[imp_proj_name]['modules']) > 3:
+ group += f"/{imp_proj_name}"
+ f.write(f' fips_ide_group("{group}")\n')
+ for import_func in import_functions:
+ f.write(f' {import_func}()\n')
f.write(' fips_ide_group("")\n')
f.write('endif()\n')
-
+
if imported[imp_proj_name]['cond']:
f.write('endif()\n')
@@ -421,16 +426,16 @@ def write_imports(fips_dir, proj_dir, cfg_name, imported) :
os.remove(import_tmp_filename)
# write the .fips-imports.py file (copy from template)
- gen_search_paths = '"{}","{}/generators",\n'.format(fips_dir, fips_dir)
- proj_gen_dir = util.get_generators_dir(proj_dir)
- if proj_gen_dir:
- gen_search_paths += '"{}","{}",\n'.format(proj_dir, proj_gen_dir)
- for imp_proj_name in imported :
- gen_dir = util.get_generators_dir(util.get_project_dir(fips_dir, imp_proj_name))
- if gen_dir:
- gen_search_paths += '"' + gen_dir + '",\n'
+ gen_search_paths = f'"{fips_dir}","{fips_dir}/generators",\n'
+ if proj_gen_dir := util.get_generators_dir(proj_dir):
+ gen_search_paths += f'"{proj_dir}","{proj_gen_dir}",\n'
+ for imp_proj_name in imported:
+ if gen_dir := util.get_generators_dir(
+ util.get_project_dir(fips_dir, imp_proj_name)
+ ):
+                gen_search_paths += f'"{gen_dir}",\n'
proj_name = util.get_project_name_from_dir(proj_dir)
- build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name);
+        build_dir = util.get_build_dir(fips_dir, proj_name, cfg_name)
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
template.copy_template_file(fips_dir, build_dir, 'fips-gen.py', { 'genpaths': gen_search_paths}, True)
@@ -445,7 +450,7 @@ def gather_and_write_imports(fips_dir, proj_dir, cfg_name) :
log.error("project imports are incomplete, please run 'fips fetch'")
#-------------------------------------------------------------------------------
-def check_imports(fips_dir, proj_dir) :
+def check_imports(fips_dir, proj_dir):
"""do various checks on the imports of a project
:param fips_dir: absolute fips directory
@@ -456,23 +461,23 @@ def check_imports(fips_dir, proj_dir) :
# check whether any imported projects are in sync with the remote git repo
success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
num_imports = 0
- for imp_proj_name in imported_projects :
+ for imp_proj_name in imported_projects:
imp_proj_dir = imported_projects[imp_proj_name]['proj_dir']
# don't git-check the top-level project directory
- if imp_proj_dir != proj_dir :
+ if imp_proj_dir != proj_dir:
num_imports += 1
- log.info("git status of '{}':".format(imp_proj_name))
- if os.path.isdir(imp_proj_dir) :
- if os.path.isdir("{}/.git".format(imp_proj_dir)) :
- if git.check_out_of_sync(imp_proj_dir) :
- log.warn(" '{}' is out of sync with remote git repo".format(imp_proj_dir))
- else :
+ log.info(f"git status of '{imp_proj_name}':")
+ if os.path.isdir(imp_proj_dir):
+ if os.path.isdir(f"{imp_proj_dir}/.git"):
+ if git.check_out_of_sync(imp_proj_dir):
+ log.warn(f" '{imp_proj_dir}' is out of sync with remote git repo")
+ else:
log.colored(log.GREEN, ' uptodate')
- else :
- log.colored(log.GREEN, " '{}' is not a git repository".format(imp_proj_dir))
- else :
- log.warn(" '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
+ else:
+ log.colored(log.GREEN, f" '{imp_proj_dir}' is not a git repository")
+ else:
+ log.warn(f" '{imp_proj_dir}' does not exist, please run 'fips fetch'")
if success and num_imports == 0 :
log.info(' none')
@@ -480,7 +485,7 @@ def check_imports(fips_dir, proj_dir) :
gather_imports(fips_dir, proj_dir)
#-------------------------------------------------------------------------------
-def check_local_changes(fips_dir, proj_dir) :
+def check_local_changes(fips_dir, proj_dir):
"""this is a variation of check_imports which just checks for local
(uncommitted or unpushed) changes.
@@ -490,54 +495,54 @@ def check_local_changes(fips_dir, proj_dir) :
"""
success, imported_projects = get_all_imports_exports(fips_dir, proj_dir)
num_imports = 0
- for imp_proj_name in imported_projects :
+ for imp_proj_name in imported_projects:
imp_proj_dir = imported_projects[imp_proj_name]['proj_dir']
# don't git-check the top-level project directory
- if imp_proj_dir != proj_dir :
+ if imp_proj_dir != proj_dir:
num_imports += 1
- log.info("checking '{}':".format(imp_proj_name))
- if os.path.isdir(imp_proj_dir) :
- if git.has_local_changes(imp_proj_dir) :
- log.warn(" '{}' has local changes (uncommitted and/or unpushed)".format(imp_proj_dir))
- else :
+ log.info(f"checking '{imp_proj_name}':")
+ if os.path.isdir(imp_proj_dir):
+ if git.has_local_changes(imp_proj_dir):
+ log.warn(f" '{imp_proj_dir}' has local changes (uncommitted and/or unpushed)")
+ else:
log.colored(log.GREEN, ' no local changes')
- else :
- log.warn(" '{}' does not exist, please run 'fips fetch'".format(imp_proj_dir))
+ else:
+ log.warn(f" '{imp_proj_dir}' does not exist, please run 'fips fetch'")
if success and num_imports == 0 :
log.info(' none')
#-------------------------------------------------------------------------------
-def _rec_update_imports(fips_dir, proj_dir, handled) :
+def _rec_update_imports(fips_dir, proj_dir, handled):
"""same as _rec_fetch_imports() but for updating the imported projects
"""
ws_dir = util.get_workspace_dir(fips_dir)
proj_name = util.get_project_name_from_dir(proj_dir)
- if proj_name not in handled :
+ if proj_name not in handled:
handled.append(proj_name)
imports = get_imports(fips_dir, proj_dir)
for dep in imports:
dep_proj_name = dep
if dep not in handled:
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
- log.colored(log.YELLOW, "=== dependency: '{}':".format(dep_proj_name))
+ log.colored(log.YELLOW, f"=== dependency: '{dep_proj_name}':")
dep_ok = False
- if os.path.isdir(dep_proj_dir) :
+ if os.path.isdir(dep_proj_dir):
# directory did not exist, do a fresh git clone
dep = imports[dep_proj_name]
git_commit = None if 'rev' not in dep else dep['rev']
- if git.has_local_changes(dep_proj_dir) :
- log.warn(" '{}' has local changes, skipping...".format(dep_proj_dir))
- else :
- log.colored(log.BLUE, " updating '{}'...".format(dep_proj_dir))
+ if git.has_local_changes(dep_proj_dir):
+ log.warn(f" '{dep_proj_dir}' has local changes, skipping...")
+ else:
+ log.colored(log.BLUE, f" updating '{dep_proj_dir}'...")
git.update(dep_proj_dir)
if git_commit:
- log.colored(log.YELLOW, "=== revision: '{}':".format(git_commit))
+ log.colored(log.YELLOW, f"=== revision: '{git_commit}':")
dep_ok = git.checkout(dep_proj_dir, git_commit)
else:
dep_ok = True
- else :
- log.warn(" '{}' does not exist, please run 'fips fetch'".format(dep_proj_dir))
+ else:
+ log.warn(f" '{dep_proj_dir}' does not exist, please run 'fips fetch'")
# recuse
if dep_ok :
handled = _rec_update_imports(fips_dir, dep_proj_dir, handled)
diff --git a/mod/emscripten.py b/mod/emscripten.py
index cf177093..ef77f922 100644
--- a/mod/emscripten.py
+++ b/mod/emscripten.py
@@ -1,5 +1,6 @@
"""emscripten SDK support"""
+
import os
import sys
import zipfile
@@ -19,9 +20,9 @@
}
urls = {
- 'win': 'http://s3.amazonaws.com/mozilla-games/emscripten/releases/{}'.format(archives['win']),
- 'osx' : 'http://s3.amazonaws.com/mozilla-games/emscripten/releases/{}'.format(archives['osx']),
- 'linux' : 'http://s3.amazonaws.com/mozilla-games/emscripten/releases/{}'.format(archives['linux'])
+ 'win': f"http://s3.amazonaws.com/mozilla-games/emscripten/releases/{archives['win']}",
+ 'osx': f"http://s3.amazonaws.com/mozilla-games/emscripten/releases/{archives['osx']}",
+ 'linux': f"http://s3.amazonaws.com/mozilla-games/emscripten/releases/{archives['linux']}",
}
# define SDK version, note that the right version must also
@@ -38,18 +39,18 @@ def get_sdk_url() :
return urls[util.get_host_platform()]
#-------------------------------------------------------------------------------
-def get_sdk_dir(fips_dir) :
+def get_sdk_dir(fips_dir):
"""return the platform-specific SDK dir"""
- return util.get_workspace_dir(fips_dir) + '/fips-sdks/' + util.get_host_platform()
+ return f'{util.get_workspace_dir(fips_dir)}/fips-sdks/{util.get_host_platform()}'
#-------------------------------------------------------------------------------
def get_sdk_version() :
return sdk_version[util.get_host_platform()]
#-------------------------------------------------------------------------------
-def get_emsdk_dir(fips_dir) :
+def get_emsdk_dir(fips_dir):
"""return the emscripten SDK path (emsdk-portable)"""
- return get_sdk_dir(fips_dir) + '/emsdk-portable'
+ return f'{get_sdk_dir(fips_dir)}/emsdk-portable'
#-------------------------------------------------------------------------------
def get_archive_name() :
@@ -57,9 +58,9 @@ def get_archive_name() :
return archives[util.get_host_platform()]
#-------------------------------------------------------------------------------
-def get_archive_path(fips_dir) :
+def get_archive_path(fips_dir):
"""return path to sdk archive"""
- return get_sdk_dir(fips_dir) + '/' + get_archive_name()
+ return f'{get_sdk_dir(fips_dir)}/{get_archive_name()}'
#-------------------------------------------------------------------------------
def ensure_sdk_dirs(fips_dir) :
@@ -69,45 +70,61 @@ def ensure_sdk_dirs(fips_dir) :
os.makedirs(emsdk_dir)
#-------------------------------------------------------------------------------
-def uncompress(src_path, dst_path, zip_dir_name) :
- if '.zip' in src_path :
+def uncompress(src_path, dst_path, zip_dir_name):
+ if '.zip' in src_path:
with zipfile.ZipFile(src_path, 'r') as archive:
- archive.extractall(dst_path + '/' + zip_dir_name)
- elif '.tgz' or '.bz2' in path :
- subprocess.call('tar -xvf {}'.format(src_path), cwd=dst_path, shell=True)
+ archive.extractall(f'{dst_path}/{zip_dir_name}')
+ else:
+ subprocess.call(f'tar -xvf {src_path}', cwd=dst_path, shell=True)
#-------------------------------------------------------------------------------
-def finish(sdk_dir) :
+def finish(sdk_dir):
"""finish setting up the emscripten SDK
FIXME: the used SDK version should be configurable!
"""
- if util.get_host_platform() == 'win' :
+ if util.get_host_platform() == 'win':
# on Windows use a stable SDK version which doesn't require clang to be compiled
subprocess.call(args='emsdk.bat update', cwd=sdk_dir, shell=True)
- subprocess.call(args='emsdk.bat install --shallow --disable-assertions {}'.format(get_sdk_version()), cwd=sdk_dir, shell=True)
- subprocess.call(args='emsdk.bat activate --embedded {}'.format(get_sdk_version()), cwd=sdk_dir, shell=True)
- else :
+ subprocess.call(
+ args=f'emsdk.bat install --shallow --disable-assertions {get_sdk_version()}',
+ cwd=sdk_dir,
+ shell=True,
+ )
+ subprocess.call(
+ args=f'emsdk.bat activate --embedded {get_sdk_version()}',
+ cwd=sdk_dir,
+ shell=True,
+ )
+ else:
subprocess.call(args='./emsdk update', cwd=sdk_dir, shell=True)
- subprocess.call(args='./emsdk install --shallow --disable-assertions {}'.format(get_sdk_version()), cwd=sdk_dir, shell=True)
- subprocess.call(args='./emsdk activate --embedded {}'.format(get_sdk_version()), cwd=sdk_dir, shell=True)
+ subprocess.call(
+ args=f'./emsdk install --shallow --disable-assertions {get_sdk_version()}',
+ cwd=sdk_dir,
+ shell=True,
+ )
+ subprocess.call(
+ args=f'./emsdk activate --embedded {get_sdk_version()}',
+ cwd=sdk_dir,
+ shell=True,
+ )
#-------------------------------------------------------------------------------
-def setup(fips_dir, proj_dir) :
+def setup(fips_dir, proj_dir):
"""setup the emscripten SDK from scratch"""
log.colored(log.YELLOW, '=== setup emscripten SDK:')
ensure_sdk_dirs(fips_dir)
# download SDK archive
- if not os.path.isfile(get_archive_path(fips_dir)) :
- log.info("downloading '{}'...".format(get_archive_name()))
+ if not os.path.isfile(get_archive_path(fips_dir)):
+ log.info(f"downloading '{get_archive_name()}'...")
urllib.urlretrieve(get_sdk_url(), get_archive_path(fips_dir), util.url_download_hook)
- else :
- log.info("'{}' already exists".format(get_archive_name()))
+ else:
+ log.info(f"'{get_archive_name()}' already exists")
# uncompress SDK archive
- log.info("\nuncompressing '{}'...".format(get_archive_name()))
+ log.info(f"\nuncompressing '{get_archive_name()}'...")
uncompress(get_archive_path(fips_dir), get_sdk_dir(fips_dir), 'emsdk-portable')
# setup SDK
diff --git a/mod/fips.py b/mod/fips.py
index 8ff47a30..57aa22e9 100644
--- a/mod/fips.py
+++ b/mod/fips.py
@@ -10,44 +10,44 @@
from mod import log, verb, util
#-------------------------------------------------------------------------------
-def show_help(args) :
+def show_help(args):
"""show help text"""
- if len(args) > 0 :
+ if len(args) > 0:
# show help for one verb
verb_name = args[0]
- if verb_name in verb.verbs :
+ if verb_name in verb.verbs:
verb.verbs[verb_name].help()
- else :
- log.error("unknown verb '{}'".format(verb))
- else :
+ else:
+            log.error(f"unknown verb '{verb_name}'")
+ else:
# show generic help
- log.info("fips: the high-level, multi-platform build system wrapper\n"
- "v{}\n"
- "https://www.github.com/floooh/fips\n".format(VERSION))
- for proj_name in verb.proj_verbs :
- if proj_name != 'fips' :
- log.colored(log.BLUE, "=== imported from '{}':".format(proj_name))
+ log.info(
+ f"fips: the high-level, multi-platform build system wrapper\nv{VERSION}\nhttps://www.github.com/floooh/fips\n"
+ )
+ for proj_name in verb.proj_verbs:
+ if proj_name != 'fips':
+ log.colored(log.BLUE, f"=== imported from '{proj_name}':")
for verb_name in verb.proj_verbs[proj_name] :
verb.verbs[verb_name].help()
log.info(' ')
#-------------------------------------------------------------------------------
-def run(fips_path, proj_path, args) :
+def run(fips_path, proj_path, args):
fips_path = util.fix_path(fips_path)
proj_path = util.fix_path(proj_path)
verb.import_verbs(fips_path, proj_path)
if len(args) <= 1:
print("run 'fips help' for more info")
- else :
+ else:
verb_name = args[1]
verb_args = args[2:]
- if verb_name in ['help', '--help', '-help'] :
+ if verb_name in ['help', '--help', '-help']:
show_help(verb_args)
elif verb_name == '--version' :
log.info(VERSION)
elif verb_name in verb.verbs :
verb.verbs[verb_name].run(fips_path, proj_path, verb_args)
- else :
- log.error("unknown verb '{}'".format(verb_name))
+ else:
+ log.error(f"unknown verb '{verb_name}'")
diff --git a/mod/log.py b/mod/log.py
index ff941257..46966cf6 100644
--- a/mod/log.py
+++ b/mod/log.py
@@ -9,7 +9,7 @@
DEF = '\033[39m'
#-------------------------------------------------------------------------------
-def error(msg, fatal=True) :
+def error(msg, fatal=True):
"""
Print error message and exit with error code 10
unless 'fatal' is False.
@@ -17,41 +17,41 @@ def error(msg, fatal=True) :
:param msg: string message
:param fatal: exit program with error code 10 if True (default is true)
"""
- print('{}[ERROR]{} {}'.format(RED, DEF, msg))
+ print(f'{RED}[ERROR]{DEF} {msg}')
if fatal :
sys.exit(10)
#-------------------------------------------------------------------------------
-def warn(msg) :
+def warn(msg):
"""print a warning message"""
- print('{}[WARNING]{} {}'.format(YELLOW, DEF, msg))
+ print(f'{YELLOW}[WARNING]{DEF} {msg}')
#-------------------------------------------------------------------------------
-def ok(item, status) :
+def ok(item, status):
"""print a green 'ok' message
:param item: first part of message
:param status: status (colored green)
"""
- print('{}:\t{}{}{}'.format(item, GREEN, status, DEF))
+ print(f'{item}:\t{GREEN}{status}{DEF}')
#-------------------------------------------------------------------------------
-def failed(item, status) :
+def failed(item, status):
"""print a red 'fail' message
:param item: first part of message
:param status: status (colored red)
"""
- print('{}:\t{}{}{}'.format(item, RED, status, DEF))
+ print(f'{item}:\t{RED}{status}{DEF}')
#-------------------------------------------------------------------------------
-def optional(item, status) :
+def optional(item, status):
"""print a yellow 'optional' message
:param item: first part of message
:param status: status (colored red)
"""
- print('{}:\t{}{}{}'.format(item, YELLOW, status, DEF))
+ print(f'{item}:\t{YELLOW}{status}{DEF}')
#-------------------------------------------------------------------------------
def info(msg) :
@@ -62,11 +62,11 @@ def info(msg) :
print(msg)
#-------------------------------------------------------------------------------
-def colored(color, msg) :
+def colored(color, msg):
"""print a colored log message
:param color: color escape sequence (e.g. log.YELLOW)
:param msg: text message
"""
- print('{}{}{}'.format(color, msg, DEF))
+ print(f'{color}{msg}{DEF}')
diff --git a/mod/markdeep.py b/mod/markdeep.py
index 9d2c0f19..339a5c40 100644
--- a/mod/markdeep.py
+++ b/mod/markdeep.py
@@ -6,8 +6,10 @@
def build(fips_dir, proj_dir):
# target directory will be 'fips-deploy/[proj]-markdeep
proj_name = util.get_project_name_from_dir(proj_dir)
- out_dir = util.get_workspace_dir(fips_dir)+'/fips-deploy/'+proj_name+'-markdeep'
- log.info('building to: {}...'.format(out_dir))
+ out_dir = (
+ f'{util.get_workspace_dir(fips_dir)}/fips-deploy/{proj_name}-markdeep'
+ )
+ log.info(f'building to: {out_dir}...')
if os.path.isdir(out_dir):
shutil.rmtree(out_dir)
os.makedirs(out_dir)
@@ -15,12 +17,14 @@ def build(fips_dir, proj_dir):
# check all .h files for embedded documentation
hdrs = []
for root, dirnames, filenames in os.walk(proj_dir):
- for filename in fnmatch.filter(filenames, '*.h'):
- hdrs.append(os.path.join(root, filename).replace('\\','/'))
+ hdrs.extend(
+ os.path.join(root, filename).replace('\\', '/')
+ for filename in fnmatch.filter(filenames, '*.h')
+ )
markdeep_files = []
capture_begin = re.compile(r'/\*#\s')
for hdr in hdrs:
- log.info(' parsing {}'.format(hdr))
+ log.info(f' parsing {hdr}')
capturing = False
markdeep_lines = []
with open(hdr, 'r') as src:
@@ -39,8 +43,8 @@ def build(fips_dir, proj_dir):
capturing = True
if markdeep_lines:
markdeep_files.append(hdr)
- dst_path = out_dir + '/' + os.path.relpath(hdr,proj_dir) + '.html'
- log.info(' markdeep block(s) found, writing: {}'.format(dst_path))
+ dst_path = f'{out_dir}/{os.path.relpath(hdr, proj_dir)}.html'
+ log.info(f' markdeep block(s) found, writing: {dst_path}')
dst_dir = os.path.dirname(dst_path)
if not os.path.isdir(dst_dir):
os.makedirs(dst_dir)
@@ -51,29 +55,31 @@ def build(fips_dir, proj_dir):
dst.write(line)
dst.write("")
dst.write("")
-
+
# write a toplevel index.html
if markdeep_files:
markdeep_files = sorted(markdeep_files)
- dst_path = out_dir + '/index.html'
- log.info('writing toc file: {}'.format(dst_path))
+ dst_path = f'{out_dir}/index.html'
+ log.info(f'writing toc file: {dst_path}')
with open(dst_path, 'w') as dst:
dst.write("\n")
dst.write("\n")
- dst.write('# {}\n'.format(proj_name))
+ dst.write(f'# {proj_name}\n')
for hdr in markdeep_files:
rel_path = os.path.relpath(hdr,proj_dir)
- dst.write('- [{}]({})\n'.format(rel_path, rel_path+'.html'))
+ dst.write(f'- [{rel_path}]({rel_path}.html)\n')
dst.write("")
dst.write("")
else:
- log.error("no headers with embedded markdeep found in '{}'!".format(proj_dir))
+ log.error(f"no headers with embedded markdeep found in '{proj_dir}'!")
# view generated markdeep in browser, we don't need a local http server for that
def view(fips_dir, proj_dir):
proj_name = util.get_project_name_from_dir(proj_dir)
- out_dir = util.get_workspace_dir(fips_dir)+'/fips-deploy/'+proj_name+'-markdeep'
- if os.path.isfile(out_dir+'/index.html'):
+ out_dir = (
+ f'{util.get_workspace_dir(fips_dir)}/fips-deploy/{proj_name}-markdeep'
+ )
+ if os.path.isfile(f'{out_dir}/index.html'):
p = util.get_host_platform()
if p == 'osx':
subprocess.call('open index.html', cwd=out_dir, shell=True)
@@ -82,4 +88,4 @@ def view(fips_dir, proj_dir):
elif p == 'linux':
subprocess.call('xdg-open index.html', cwd=out_dir, shell=True)
else:
- log.error('no generated index.html found: {}'.format(out_dir+'/index.html'))
+ log.error(f'no generated index.html found: {out_dir}/index.html')
diff --git a/mod/project.py b/mod/project.py
index ba691cfd..7c5bd469 100644
--- a/mod/project.py
+++ b/mod/project.py
@@ -9,7 +9,7 @@
from mod.tools import git, cmake, make, ninja, xcodebuild, xcrun, ccmake, cmake_gui, vscode, clion
#-------------------------------------------------------------------------------
-def init(fips_dir, proj_name) :
+def init(fips_dir, proj_name):
"""initialize an existing project directory as a fips directory by
copying essential files and creating or updating .gitignore
@@ -19,21 +19,21 @@ def init(fips_dir, proj_name) :
"""
ws_dir = util.get_workspace_dir(fips_dir)
proj_dir = util.get_project_dir(fips_dir, proj_name)
- if os.path.isdir(proj_dir) :
+ if os.path.isdir(proj_dir):
templ_values = {
'project': proj_name
}
for f in ['CMakeLists.txt', 'fips', 'fips.cmd', 'fips.yml'] :
template.copy_template_file(fips_dir, proj_dir, f, templ_values)
- os.chmod(proj_dir + '/fips', 0o744)
+ os.chmod(f'{proj_dir}/fips', 0o744)
gitignore_entries = ['.fips-*', '*.pyc', '.vscode/', '.idea/']
template.write_git_ignore(proj_dir, gitignore_entries)
- else :
- log.error("project dir '{}' does not exist".format(proj_dir))
+ else:
+ log.error(f"project dir '{proj_dir}' does not exist")
return False
#-------------------------------------------------------------------------------
-def clone(fips_dir, url) :
+def clone(fips_dir, url):
"""clone an existing fips project with git, do NOT fetch dependencies
:param fips_dir: absolute path to fips
@@ -43,64 +43,63 @@ def clone(fips_dir, url) :
ws_dir = util.get_workspace_dir(fips_dir)
proj_name = util.get_project_name_from_url(url)
proj_dir = util.get_project_dir(fips_dir, proj_name)
- if not os.path.isdir(proj_dir) :
+ if not os.path.isdir(proj_dir):
git_url = util.get_giturl_from_url(url)
git_branch = util.get_gitbranch_from_url(url)
- if git.clone(git_url, git_branch, git.clone_depth, proj_name, ws_dir) :
+ if git.clone(git_url, git_branch, git.clone_depth, proj_name, ws_dir):
# fetch imports
dep.fetch_imports(fips_dir, proj_dir)
return True
- else :
- log.error("failed to 'git clone {}' into '{}'".format(url, proj_dir))
+ else:
+ log.error(f"failed to 'git clone {url}' into '{proj_dir}'")
return False
- else :
- log.error("project dir '{}' already exists".format(proj_dir))
+ else:
+ log.error(f"project dir '{proj_dir}' already exists")
return False
#-------------------------------------------------------------------------------
-def gen_project(fips_dir, proj_dir, cfg, force) :
+def gen_project(fips_dir, proj_dir, cfg, force):
"""private: generate build files for one config"""
proj_name = util.get_project_name_from_dir(proj_dir)
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
- defines = {}
- defines['FIPS_USE_CCACHE'] = 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'
+ defines = {
+ 'FIPS_USE_CCACHE': 'ON' if settings.get(proj_dir, 'ccache') else 'OFF'
+ }
defines['FIPS_AUTO_IMPORT'] = 'OFF' if dep.get_policy(proj_dir, 'no_auto_import') else 'ON'
if cfg['generator'] in ['Ninja', 'Unix Makefiles']:
defines['CMAKE_EXPORT_COMPILE_COMMANDS'] = 'ON'
if cfg['platform'] == 'ios':
defines['CMAKE_OSX_SYSROOT'] = xcrun.get_ios_sdk_sysroot()
- ios_team_id = settings.get(proj_dir, 'iosteam')
- if ios_team_id:
+ if ios_team_id := settings.get(proj_dir, 'iosteam'):
defines['FIPS_IOS_TEAMID'] = ios_team_id
if cfg['platform'] == 'osx':
defines['CMAKE_OSX_SYSROOT'] = xcrun.get_macos_sdk_sysroot()
do_it = force
if not os.path.isdir(build_dir) :
os.makedirs(build_dir)
- if not os.path.isfile(build_dir + '/CMakeCache.txt'):
+ if not os.path.isfile(f'{build_dir}/CMakeCache.txt'):
do_it = True
- if do_it :
+ if not do_it:
+ return True
# if Ninja build tool and on Windows, need to copy
# the precompiled ninja.exe to the build dir
- log.colored(log.YELLOW, "=== generating: {}".format(cfg['name']))
- log.info("config file: {}".format(cfg['path']))
- toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
- if toolchain_path :
- log.info("Using Toolchain File: {}".format(toolchain_path))
- if cfg['build_tool'] == 'ninja' :
- ninja.prepare_ninja_tool(fips_dir, build_dir)
- cmake_result = cmake.run_gen(cfg, fips_dir, proj_dir, build_dir, toolchain_path, defines)
- if cfg['build_tool'] == 'vscode_cmake':
- vscode.write_workspace_settings(fips_dir, proj_dir, cfg)
- if cfg['build_tool'] == 'clion':
- clion.write_workspace_settings(fips_dir, proj_dir, cfg)
- return cmake_result
- else :
- return True
+ log.colored(log.YELLOW, f"=== generating: {cfg['name']}")
+ log.info(f"config file: {cfg['path']}")
+ toolchain_path = config.get_toolchain(fips_dir, proj_dir, cfg)
+ if toolchain_path:
+ log.info(f"Using Toolchain File: {toolchain_path}")
+ if cfg['build_tool'] == 'ninja' :
+ ninja.prepare_ninja_tool(fips_dir, build_dir)
+ cmake_result = cmake.run_gen(cfg, fips_dir, proj_dir, build_dir, toolchain_path, defines)
+ if cfg['build_tool'] == 'vscode_cmake':
+ vscode.write_workspace_settings(fips_dir, proj_dir, cfg)
+ if cfg['build_tool'] == 'clion':
+ clion.write_workspace_settings(fips_dir, proj_dir, cfg)
+ return cmake_result
#-------------------------------------------------------------------------------
-def gen(fips_dir, proj_dir, cfg_name) :
+def gen(fips_dir, proj_dir, cfg_name):
"""generate build files with cmake
:param fips_dir: absolute path to fips
@@ -118,29 +117,31 @@ def gen(fips_dir, proj_dir, cfg_name) :
# load the config(s)
configs = config.load(fips_dir, proj_dir, cfg_name)
num_valid_configs = 0
- if configs :
- for cfg in configs :
+ if configs:
+ for cfg in configs:
# check if config is valid
config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True)
- if config_valid :
- if gen_project(fips_dir, proj_dir, cfg, True) :
+ if config_valid:
+ if gen_project(fips_dir, proj_dir, cfg, True):
num_valid_configs += 1
- else :
- log.error("failed to generate build files for config '{}'".format(cfg['name']), False)
- else :
- log.error("'{}' is not a valid config".format(cfg['name']), False)
- else :
- log.error("No configs found for '{}'".format(cfg_name))
-
- if num_valid_configs != len(configs) :
- log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
- return False
- else :
- log.colored(log.GREEN, '{} configs generated'.format(num_valid_configs))
+ else:
+ log.error(f"failed to generate build files for config '{cfg['name']}'", False)
+ else:
+ log.error(f"'{cfg['name']}' is not a valid config", False)
+ else:
+ log.error(f"No configs found for '{cfg_name}'")
+
+ if num_valid_configs != len(configs):
+ log.error(
+ f'{len(configs) - num_valid_configs} out of {len(configs)} configs failed!'
+ )
+ return False
+ else:
+ log.colored(log.GREEN, f'{num_valid_configs} configs generated')
return True
#-------------------------------------------------------------------------------
-def configure(fips_dir, proj_dir, cfg_name) :
+def configure(fips_dir, proj_dir, cfg_name):
"""run ccmake or cmake-gui on the provided project and config
:param fips_dir: absolute fips path
@@ -153,15 +154,13 @@ def configure(fips_dir, proj_dir, cfg_name) :
util.ensure_valid_project_dir(proj_dir)
dep.gather_and_write_imports(fips_dir, proj_dir, cfg_name)
- # load configs, if more then one, only use first one
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
cfg = configs[0]
- log.colored(log.YELLOW, '=== configuring: {}'.format(cfg['name']))
+ log.colored(log.YELLOW, f"=== configuring: {cfg['name']}")
# generate build files
- if not gen_project(fips_dir, proj_dir, cfg, True) :
- log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))
+ if not gen_project(fips_dir, proj_dir, cfg, True):
+ log.error(f"Failed to generate '{cfg['name']}' of project '{proj_name}'")
# run ccmake or cmake-gui
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
@@ -171,11 +170,11 @@ def configure(fips_dir, proj_dir, cfg_name) :
cmake_gui.run(build_dir)
else :
log.error("Neither 'ccmake' nor 'cmake-gui' found (run 'fips diag')")
- else :
- log.error("No configs found for '{}'".format(cfg_name))
+ else:
+ log.error(f"No configs found for '{cfg_name}'")
#-------------------------------------------------------------------------------
-def make_clean(fips_dir, proj_dir, cfg_name) :
+def make_clean(fips_dir, proj_dir, cfg_name):
"""perform a 'make clean' on the project
:param fips_dir: absolute path of fips
@@ -186,11 +185,11 @@ def make_clean(fips_dir, proj_dir, cfg_name) :
proj_name = util.get_project_name_from_dir(proj_dir)
configs = config.load(fips_dir, proj_dir, cfg_name)
num_valid_configs = 0
- if configs :
- for cfg in configs :
+ if configs:
+ for cfg in configs:
config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
- if config_valid :
- log.colored(log.YELLOW, "=== cleaning: {}".format(cfg['name']))
+ if config_valid:
+ log.colored(log.YELLOW, f"=== cleaning: {cfg['name']}")
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
result = False
@@ -202,25 +201,27 @@ def make_clean(fips_dir, proj_dir, cfg_name) :
result = xcodebuild.run_clean(fips_dir, build_dir)
else :
result = cmake.run_clean(fips_dir, build_dir)
-
- if result :
+
+ if result:
num_valid_configs += 1
- else :
- log.error("Failed to clean config '{}' of project '{}'".format(cfg['name'], proj_name))
- else :
- log.error("Config '{}' not valid in this environment".format(cfg['name']))
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
-
- if num_valid_configs != len(configs) :
- log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
- return False
- else :
- log.colored(log.GREEN, '{} configs cleaned'.format(num_valid_configs))
+ else:
+ log.error(f"Failed to clean config '{cfg['name']}' of project '{proj_name}'")
+ else:
+ log.error(f"Config '{cfg['name']}' not valid in this environment")
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
+
+ if num_valid_configs != len(configs):
+ log.error(
+ f'{len(configs) - num_valid_configs} out of {len(configs)} configs failed!'
+ )
+ return False
+ else:
+ log.colored(log.GREEN, f'{num_valid_configs} configs cleaned')
return True
#-------------------------------------------------------------------------------
-def build(fips_dir, proj_dir, cfg_name, target=None) :
+def build(fips_dir, proj_dir, cfg_name, target=None):
"""perform a build of config(s) in project
:param fips_dir: absolute path of fips
@@ -239,15 +240,15 @@ def build(fips_dir, proj_dir, cfg_name, target=None) :
# load the config(s)
configs = config.load(fips_dir, proj_dir, cfg_name)
num_valid_configs = 0
- if configs :
- for cfg in configs :
+ if configs:
+ for cfg in configs:
# check if config is valid
config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors=True)
- if config_valid :
- log.colored(log.YELLOW, "=== building: {}".format(cfg['name']))
+ if config_valid:
+ log.colored(log.YELLOW, f"=== building: {cfg['name']}")
- if not gen_project(fips_dir, proj_dir, cfg, False) :
- log.error("Failed to generate '{}' of project '{}'".format(cfg['name'], proj_name))
+ if not gen_project(fips_dir, proj_dir, cfg, False):
+ log.error(f"Failed to generate '{cfg['name']}' of project '{proj_name}'")
# select and run build tool
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
@@ -262,24 +263,26 @@ def build(fips_dir, proj_dir, cfg_name, target=None) :
else :
result = cmake.run_build(fips_dir, target, cfg['build_type'], build_dir, num_jobs)
- if result :
+ if result:
num_valid_configs += 1
- else :
- log.error("Failed to build config '{}' of project '{}'".format(cfg['name'], proj_name))
- else :
- log.error("Config '{}' not valid in this environment".format(cfg['name']))
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
-
- if num_valid_configs != len(configs) :
- log.error('{} out of {} configs failed!'.format(len(configs) - num_valid_configs, len(configs)))
- return False
- else :
- log.colored(log.GREEN, '{} configs built'.format(num_valid_configs))
+ else:
+ log.error(f"Failed to build config '{cfg['name']}' of project '{proj_name}'")
+ else:
+ log.error(f"Config '{cfg['name']}' not valid in this environment")
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
+
+ if num_valid_configs != len(configs):
+ log.error(
+ f'{len(configs) - num_valid_configs} out of {len(configs)} configs failed!'
+ )
+ return False
+ else:
+ log.colored(log.GREEN, f'{num_valid_configs} configs built')
return True
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, cfg_name, target_name, target_args, target_cwd) :
+def run(fips_dir, proj_dir, cfg_name, target_name, target_args, target_cwd):
"""run a build target executable
:param fips_dir: absolute path of fips
@@ -293,12 +296,13 @@ def run(fips_dir, proj_dir, cfg_name, target_name, target_args, target_cwd) :
retcode = 10
proj_name = util.get_project_name_from_dir(proj_dir)
util.ensure_valid_project_dir(proj_dir)
-
- # load the config(s)
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
- for cfg in configs :
- log.colored(log.YELLOW, "=== run '{}' (config: {}, project: {}):".format(target_name, cfg['name'], proj_name))
+
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
+ for cfg in configs:
+ log.colored(
+ log.YELLOW,
+ f"=== run '{target_name}' (config: {cfg['name']}, project: {proj_name}):",
+ )
# find deploy dir where executables live
deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
@@ -307,65 +311,69 @@ def run(fips_dir, proj_dir, cfg_name, target_name, target_args, target_cwd) :
if cfg['platform'] == 'emscripten':
# special case: emscripten app
- html_name = target_name + '.html'
- if util.get_host_platform() == 'osx' :
- try :
+ html_name = f'{target_name}.html'
+ if util.get_host_platform() == 'osx':
+ try:
subprocess.call(
- 'open http://localhost:8000/{} ; python {}/mod/httpserver.py'.format(html_name, fips_dir),
- cwd = target_cwd, shell=True)
+ f'open http://localhost:8000/{html_name} ; python {fips_dir}/mod/httpserver.py',
+ cwd=target_cwd,
+ shell=True,
+ )
except KeyboardInterrupt :
return 0
- elif util.get_host_platform() == 'win' :
- try :
- cmd = 'cmd /c start http://localhost:8000/{} && python {}/mod/httpserver.py'.format(html_name, fips_dir)
+ elif util.get_host_platform() == 'win':
+ try:
+ cmd = f'cmd /c start http://localhost:8000/{html_name} && python {fips_dir}/mod/httpserver.py'
subprocess.call(cmd, cwd = target_cwd, shell=True)
except KeyboardInterrupt :
return 0
- elif util.get_host_platform() == 'linux' :
- try :
+ elif util.get_host_platform() == 'linux':
+ try:
subprocess.call(
- 'xdg-open http://localhost:8000/{}; python {}/mod/httpserver.py'.format(html_name, fips_dir),
- cwd = target_cwd, shell=True)
+ f'xdg-open http://localhost:8000/{html_name}; python {fips_dir}/mod/httpserver.py',
+ cwd=target_cwd,
+ shell=True,
+ )
except KeyboardInterrupt :
return 0
- else :
+ else:
log.error("don't know how to start HTML app on this platform")
- elif cfg['platform'] == 'android' :
- try :
+ elif cfg['platform'] == 'android':
+ try:
adb_path = android.get_adb_path(fips_dir)
pkg_name = android.target_to_package_name(target_name)
# Android: first re-install the apk...
- cmd = '{} install -r {}.apk'.format(adb_path, target_name)
+ cmd = f'{adb_path} install -r {target_name}.apk'
subprocess.call(cmd, shell=True, cwd=deploy_dir)
# ...then start the apk
- cmd = '{} shell am start -n {}/android.app.NativeActivity'.format(adb_path, pkg_name)
+ cmd = f'{adb_path} shell am start -n {pkg_name}/android.app.NativeActivity'
subprocess.call(cmd, shell=True)
# ...then run adb logcat
- cmd = '{} logcat'.format(adb_path)
+ cmd = f'{adb_path} logcat'
subprocess.call(cmd, shell=True)
return 0
except KeyboardInterrupt :
return 0
- elif os.path.isdir('{}/{}.app'.format(deploy_dir, target_name)) :
+ elif os.path.isdir(f'{deploy_dir}/{target_name}.app'):
# special case: Mac app
- cmd_line = '{}/{}.app/Contents/MacOS/{}'.format(deploy_dir, target_name, target_name)
- else :
- cmd_line = '{}/{}'.format(deploy_dir, target_name)
- if cmd_line :
+ cmd_line = f'{deploy_dir}/{target_name}.app/Contents/MacOS/{target_name}'
+ else:
+ cmd_line = f'{deploy_dir}/{target_name}'
+ if cmd_line:
if target_args :
cmd_line += ' ' + ' '.join(target_args)
try:
retcode = subprocess.call(args=cmd_line, cwd=target_cwd, shell=True)
except OSError as e:
- log.error("Failed to execute '{}' with '{}'".format(target_name, e.strerror))
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
+ log.error(f"Failed to execute '{target_name}' with '{e.strerror}'")
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
return retcode
#-------------------------------------------------------------------------------
-def clean(fips_dir, proj_dir, cfg_name) :
+def clean(fips_dir, proj_dir, cfg_name):
"""clean build files
:param fips_dir: absolute path of fips
@@ -373,33 +381,32 @@ def clean(fips_dir, proj_dir, cfg_name) :
:param cfg_name: config name (or pattern)
"""
proj_name = util.get_project_name_from_dir(proj_dir)
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
num_cleaned_configs = 0
- for cfg in configs :
+ for cfg in configs:
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
build_dir_exists = os.path.isdir(build_dir)
deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
deploy_dir_exists = os.path.isdir(deploy_dir)
- if build_dir_exists or deploy_dir_exists :
- log.colored(log.YELLOW, "=== clean: {}".format(cfg['name']))
+ if build_dir_exists or deploy_dir_exists:
+ log.colored(log.YELLOW, f"=== clean: {cfg['name']}")
num_cleaned_configs += 1
- if build_dir_exists :
+ if build_dir_exists:
shutil.rmtree(build_dir)
- log.info(" deleted '{}'".format(build_dir))
+ log.info(f" deleted '{build_dir}'")
- if deploy_dir_exists :
+ if deploy_dir_exists:
shutil.rmtree(deploy_dir)
- log.info(" deleted '{}'".format(deploy_dir))
- if num_cleaned_configs == 0 :
- log.colored(log.YELLOW, "=== clean: nothing to clean for {}".format(cfg_name))
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
+ log.info(f" deleted '{deploy_dir}'")
+ if num_cleaned_configs == 0:
+ log.colored(log.YELLOW, f"=== clean: nothing to clean for {cfg_name}")
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
#-------------------------------------------------------------------------------
-def get_target_list(fips_dir, proj_dir, cfg_name) :
+def get_target_list(fips_dir, proj_dir, cfg_name):
"""get project targets config name, only works
if a cmake run was performed before
@@ -408,9 +415,8 @@ def get_target_list(fips_dir, proj_dir, cfg_name) :
:param cfg_name: the config name
:returns: (success, targets)
"""
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
return util.get_cfg_target_list(fips_dir, proj_dir, configs[0])
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
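The `if configs := config.load(...)` rewrites in this file rely on the assignment-expression ("walrus") operator, which requires Python 3.8 or newer. A minimal standalone sketch of the before/after shape; `load_configs` and the config name are placeholders, not the fips API:

# Stand-in for config.load(): returns a possibly-empty list of config dicts.
def load_configs(pattern):
    return [{'name': pattern}] if pattern else []

# Before: separate assignment and truthiness test.
configs = load_configs('linux-make-debug')
if configs:
    print('first config:', configs[0]['name'])

# After: the assignment expression keeps the binding and the test on one
# line (Python 3.8+ only).
if configs := load_configs('linux-make-debug'):
    print('first config:', configs[0]['name'])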
diff --git a/mod/registry.py b/mod/registry.py
index 68e57423..a789d110 100644
--- a/mod/registry.py
+++ b/mod/registry.py
@@ -5,11 +5,11 @@
registry = None
#-------------------------------------------------------------------------------
-def load(fips_dir) :
+def load(fips_dir):
"""load registry on demand"""
global registry
- if registry is None :
- with open(fips_dir + '/registry.yml', 'r') as f :
+ if registry is None:
+ with open(f'{fips_dir}/registry.yml', 'r') as f:
registry = yaml.load(f)
#-------------------------------------------------------------------------------
@@ -24,7 +24,7 @@ def exists(fips_dir, proj_name) :
return proj_name in registry
#-------------------------------------------------------------------------------
-def lookup_url(fips_dir, proj_name) :
+def lookup_url(fips_dir, proj_name):
"""lookup git url for project name, return None if not found
:param fips_dir: absolute path to fips
@@ -32,13 +32,10 @@ def lookup_url(fips_dir, proj_name) :
:returns: git url from registry, or None
"""
load(fips_dir)
- if proj_name in registry :
- return registry[proj_name]
- else :
- return None
+ return registry[proj_name] if proj_name in registry else None
#-------------------------------------------------------------------------------
-def get_url(fips_dir, name_or_url) :
+def get_url(fips_dir, name_or_url):
"""Checks if name_or_url is in the registry, if yes, return url
from registry, otherwise return name_or_url. This is useful
if a parameter can be either a project name or a valid URL, and
@@ -49,7 +46,4 @@ def get_url(fips_dir, name_or_url) :
:returns: URL from registry, or the original name_or_url param
"""
load(fips_dir)
- if name_or_url in registry :
- return registry[name_or_url]
- else :
- return name_or_url
+ return registry[name_or_url] if name_or_url in registry else name_or_url
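Both registry lookups now collapse to conditional expressions. Since `registry` is a plain dict here, `dict.get` with its default of None expresses the same lookup even more directly; this is only an illustrative alternative, not something the patch changes:

registry = {'oryol': 'https://github.com/floooh/oryol.git'}  # example entry

def lookup_url(proj_name):
    # conditional-expression form used in the patch
    return registry[proj_name] if proj_name in registry else None

def lookup_url_get(proj_name):
    # equivalent built-in form: dict.get returns None for a missing key
    return registry.get(proj_name)

assert lookup_url('oryol') == lookup_url_get('oryol')
assert lookup_url('missing') is None and lookup_url_get('missing') is None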
diff --git a/mod/settings.py b/mod/settings.py
index 0395212b..47cedbd0 100644
--- a/mod/settings.py
+++ b/mod/settings.py
@@ -6,14 +6,14 @@
from mod import log, util, config
#-------------------------------------------------------------------------------
-def load(proj_dir) :
+def load(proj_dir):
"""load the .fips-settings.yml file from project directory
:param proj_dir: absolute project directory
:returns: dictionary object
"""
settings = None
- path = proj_dir + '/.fips-settings.yml'
+ path = f'{proj_dir}/.fips-settings.yml'
if os.path.isfile(path) :
with open(path, 'r') as f :
settings = yaml.load(f)
@@ -22,37 +22,35 @@ def load(proj_dir) :
return settings
#-------------------------------------------------------------------------------
-def save(proj_dir, settings) :
+def save(proj_dir, settings):
"""save settings back to .fips-settings.yml file in project directory
:param proj_dir: absolute project directory
:param settings: settings dictionary object
"""
- path = proj_dir + '/.fips-settings.yml'
+ path = f'{proj_dir}/.fips-settings.yml'
with open(path, 'w') as f :
yaml.dump(settings, f)
#-------------------------------------------------------------------------------
-def get_default(key) :
+def get_default(key):
"""get the default value for a settings key
:param key: settings key
:returns: default value, or None if key is invalid
"""
- if key == 'config' :
+ if key == 'config':
return config.get_default_config()
- elif key == 'target' :
+ elif key == 'target' or key not in ['jobs', 'ccache']:
return None
- elif key == 'jobs' :
+ elif key == 'jobs':
# this is what ninja seems to do for default num jobs
return util.get_num_cpucores() + 2
- elif key == 'ccache' :
+ else:
return False
- else :
- return None
#-------------------------------------------------------------------------------
-def get(proj_dir, key) :
+def get(proj_dir, key):
"""return settings value by key, default value if the value
doesn't exist in the project-local settings file
@@ -62,17 +60,15 @@ def get(proj_dir, key) :
"""
util.ensure_valid_project_dir(proj_dir)
- value = None
settings = load(proj_dir)
- if key in settings :
- value = settings[key]
+ value = settings[key] if key in settings else None
if value is None :
value = get_default(key)
return value
#-------------------------------------------------------------------------------
-def set(proj_dir, key, value) :
+def set(proj_dir, key, value):
"""update a settings value by key and save project-local
.fips-settings file
@@ -91,10 +87,10 @@ def set(proj_dir, key, value) :
value_str = 'on' if value else 'off';
else :
value_str = str(value)
- log.info("'{}' set to '{}' in project '{}'".format(key, value_str, proj_name))
+ log.info(f"'{key}' set to '{value_str}' in project '{proj_name}'")
#-------------------------------------------------------------------------------
-def unset(proj_dir, key) :
+def unset(proj_dir, key):
"""delete a settings value from the project-local settings file
:param proj_dir: absolute project directory
@@ -108,5 +104,5 @@ def unset(proj_dir, key) :
save(proj_dir, settings)
proj_name = util.get_project_name_from_dir(proj_dir)
- log.info("'{}' unset in project '{}'".format(key, proj_name))
+ log.info(f"'{key}' unset in project '{proj_name}'")
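The reshuffled `get_default` is easy to misread: 'ccache' is now handled by the trailing `else`, and every unknown key is caught by the `key == 'target' or key not in ['jobs', 'ccache']` arm. A standalone sketch with stubbed values (the real function calls `config.get_default_config()` and `util.get_num_cpucores()`) showing that the old and new branch orders agree for every key:

def get_default_old(key):
    if key == 'config':
        return 'default-config'          # stub for config.get_default_config()
    elif key == 'target':
        return None
    elif key == 'jobs':
        return 4 + 2                     # stub for util.get_num_cpucores() + 2
    elif key == 'ccache':
        return False
    else:
        return None

def get_default_new(key):
    if key == 'config':
        return 'default-config'
    elif key == 'target' or key not in ['jobs', 'ccache']:
        return None
    elif key == 'jobs':
        return 4 + 2
    else:                                # only 'ccache' can reach this branch
        return False

for key in ['config', 'target', 'jobs', 'ccache', 'bogus']:
    assert get_default_old(key) == get_default_new(key)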
diff --git a/mod/template.py b/mod/template.py
index 705487f3..b0b4c987 100644
--- a/mod/template.py
+++ b/mod/template.py
@@ -6,7 +6,7 @@
from mod import log, util
#-------------------------------------------------------------------------------
-def write_git_ignore(proj_dir, entries) :
+def write_git_ignore(proj_dir, entries):
"""modify or create the .gitignore file with fips-specific
entries. fips entries will go into a special section marked with:
#>fips
@@ -14,7 +14,7 @@ def write_git_ignore(proj_dir, entries) :
:param entries: array of fips .gitignore strings
"""
- path = proj_dir + '/.gitignore'
+ path = f'{proj_dir}/.gitignore'
out_lines = []
if os.path.isfile(path) :
# .gitignore already exists, read into lines array,
@@ -29,10 +29,8 @@ def write_git_ignore(proj_dir, entries) :
out_lines.append(l)
if '#fips\n')
- out_lines.append('# this area is managed by fips, do not edit\n')
+
+ out_lines.extend(('#>fips\n', '# this area is managed by fips, do not edit\n'))
out_lines.extend('\n'.join(entries) + '\n')
out_lines.append('#\n')
f.write('')
- ws_path = '{}/.idea/misc.xml'.format(proj_dir)
+ ws_path = f'{proj_dir}/.idea/misc.xml'
with open(ws_path, 'w') as f:
f.write('\n')
f.write('\n')
f.write(' \n')
f.write('')
- ws_path = '{}/.idea/modules.xml'.format(proj_dir)
+ ws_path = f'{proj_dir}/.idea/modules.xml'
with open(ws_path, 'w') as f:
f.write('\n')
f.write('\n')
f.write(' \n')
f.write(' \n')
- f.write(' \n'.format(proj_name, proj_name))
+ f.write(
+ f' \n'
+ )
f.write(' \n')
f.write(' \n')
f.write('')
@@ -83,9 +89,9 @@ def write_clion_module_files(fips_dir, proj_dir, cfg):
def write_clion_workspace_file(fips_dir, proj_dir, cfg):
'''write bare-bone workspace.xml config file'''
proj_name = util.get_project_name_from_dir(proj_dir)
- gen_options = '-DFIPS_CONFIG={}'.format(cfg['name'])
- gen_dir = '$PROJECT_DIR$/../fips-build/{}/{}'.format(proj_name, cfg['name'])
- ws_path = '{}/.idea/workspace.xml'.format(proj_dir)
+ gen_options = f"-DFIPS_CONFIG={cfg['name']}"
+ gen_dir = f"$PROJECT_DIR$/../fips-build/{proj_name}/{cfg['name']}"
+ ws_path = f'{proj_dir}/.idea/workspace.xml'
# do not overwrite existing .xml
if os.path.exists(ws_path):
return
@@ -95,7 +101,9 @@ def write_clion_workspace_file(fips_dir, proj_dir, cfg):
# TODO: CMakeRunConfigurationManager
f.write(' \n')
f.write(' \n')
- f.write(' \n'.format(gen_options, gen_dir))
+ f.write(
+ f' \n'
+ )
f.write(' \n')
f.write(' \n')
# TODO: RunManager
@@ -106,7 +114,7 @@ def write_workspace_settings(fips_dir, proj_dir, cfg):
'''write the CLion *.xml files required to open the project
'''
log.info("=== writing JetBrains CLion config files...")
- clion_dir = proj_dir + '/.idea'
+ clion_dir = f'{proj_dir}/.idea'
if not os.path.isdir(clion_dir):
os.makedirs(clion_dir)
write_clion_module_files(fips_dir, proj_dir, cfg)
@@ -115,13 +123,15 @@ def write_workspace_settings(fips_dir, proj_dir, cfg):
#-------------------------------------------------------------------------------
def cleanup(fips_dir, proj_dir):
'''deletes the .idea directory'''
- clion_dir = proj_dir + '/.idea'
+ clion_dir = f'{proj_dir}/.idea'
if os.path.isdir(clion_dir):
- log.info(log.RED + 'Please confirm to delete the following directory:' + log.DEF)
- log.info(' {}'.format(clion_dir))
- if util.confirm(log.RED + 'Delete this directory?' + log.DEF):
+ log.info(
+ f'{log.RED}Please confirm to delete the following directory:{log.DEF}'
+ )
+ log.info(f' {clion_dir}')
+ if util.confirm(f'{log.RED}Delete this directory?{log.DEF}'):
if os.path.isdir(clion_dir):
- log.info(' deleting {}'.format(clion_dir))
+ log.info(f' deleting {clion_dir}')
shutil.rmtree(clion_dir)
log.info('Done.')
else:
diff --git a/mod/tools/cmake.py b/mod/tools/cmake.py
index acfa5ff8..0a5e7e32 100644
--- a/mod/tools/cmake.py
+++ b/mod/tools/cmake.py
@@ -12,7 +12,7 @@
not_found = 'please install cmake 2.8 or newer'
#------------------------------------------------------------------------------
-def check_exists(fips_dir, major=2, minor=8) :
+def check_exists(fips_dir, major=2, minor=8):
"""test if cmake is in the path and has the required version
:returns: True if cmake found and is the required version
@@ -22,15 +22,15 @@ def check_exists(fips_dir, major=2, minor=8) :
ver = out.split()[2].split('.')
if int(ver[0]) > major or (int(ver[0]) == major and int(ver[1]) >= minor):
return True
- else :
- log.info('{}NOTE{}: cmake must be at least version {}.{} (found: {}.{}.{})'.format(
- log.RED, log.DEF, major, minor, ver[0],ver[1],ver[2]))
- return False
+ log.info(
+ f'{log.RED}NOTE{log.DEF}: cmake must be at least version {major}.{minor} (found: {ver[0]}.{ver[1]}.{ver[2]})'
+ )
+ return False
except (OSError, subprocess.CalledProcessError):
return False
#------------------------------------------------------------------------------
-def run_gen(cfg, fips_dir, project_dir, build_dir, toolchain_path, defines) :
+def run_gen(cfg, fips_dir, project_dir, build_dir, toolchain_path, defines):
"""run cmake tool to generate build files
:param cfg: a fips config object
@@ -40,36 +40,36 @@ def run_gen(cfg, fips_dir, project_dir, build_dir, toolchain_path, defines) :
:returns: True if cmake returned successful
"""
cmdLine = 'cmake'
- if cfg['generator'] != 'Default' :
- cmdLine += ' -G "{}"'.format(cfg['generator'])
- if cfg['generator-platform'] :
- cmdLine += ' -A "{}"'.format(cfg['generator-platform'])
- if cfg['generator-toolset'] :
- cmdLine += ' -T "{}"'.format(cfg['generator-toolset'])
- cmdLine += ' -DCMAKE_BUILD_TYPE={}'.format(cfg['build_type'])
+ if cfg['generator'] != 'Default':
+ cmdLine += f""" -G "{cfg['generator']}\""""
+ if cfg['generator-platform']:
+ cmdLine += f""" -A "{cfg['generator-platform']}\""""
+ if cfg['generator-toolset']:
+ cmdLine += f""" -T "{cfg['generator-toolset']}\""""
+ cmdLine += f" -DCMAKE_BUILD_TYPE={cfg['build_type']}"
if cfg['build_tool'] == 'ninja' and platform.system() == 'Windows':
- cmdLine += ' -DCMAKE_MAKE_PROGRAM={}'.format(ninja.get_ninja_tool(fips_dir))
- if toolchain_path is not None :
- cmdLine += ' -DCMAKE_TOOLCHAIN_FILE={}'.format(toolchain_path)
- cmdLine += ' -DFIPS_CONFIG={}'.format(cfg['name'])
- if cfg['defines'] is not None :
- for key in cfg['defines'] :
+ cmdLine += f' -DCMAKE_MAKE_PROGRAM={ninja.get_ninja_tool(fips_dir)}'
+ if toolchain_path is not None:
+ cmdLine += f' -DCMAKE_TOOLCHAIN_FILE={toolchain_path}'
+ cmdLine += f" -DFIPS_CONFIG={cfg['name']}"
+ if cfg['defines'] is not None:
+ for key in cfg['defines']:
val = cfg['defines'][key]
- if type(val) is bool :
- cmdLine += ' -D{}={}'.format(key, 'ON' if val else 'OFF')
- else :
- cmdLine += ' -D{}="{}"'.format(key, val)
- for key in defines :
- cmdLine += ' -D{}={}'.format(key, defines[key])
- cmdLine += ' -B' + build_dir
- cmdLine += ' -H' + project_dir
+ if type(val) is bool:
+ cmdLine += f" -D{key}={'ON' if val else 'OFF'}"
+ else:
+ cmdLine += f' -D{key}="{val}"'
+ for key in defines:
+ cmdLine += f' -D{key}={defines[key]}'
+ cmdLine += f' -B{build_dir}'
+ cmdLine += f' -H{project_dir}'
print(cmdLine)
res = subprocess.call(cmdLine, cwd=build_dir, shell=True)
return res == 0
#------------------------------------------------------------------------------
-def run_build(fips_dir, target, build_type, build_dir, num_jobs=1) :
+def run_build(fips_dir, target, build_type, build_dir, num_jobs=1):
"""run cmake in build mode
:param target: build target, can be None (builds all)
@@ -78,13 +78,13 @@ def run_build(fips_dir, target, build_type, build_dir, num_jobs=1) :
:param num_jobs: number of parallel jobs (default: 1)
:returns: True if cmake returns successful
"""
- cmdLine = 'cmake --build . --config {}'.format(build_type)
- if target :
- cmdLine += ' --target {}'.format(target)
- if platform.system() == 'Windows' :
- cmdLine += ' -- /nologo /verbosity:minimal /maxcpucount:{}'.format(num_jobs)
- else :
- cmdLine += ' -- -j{}'.format(num_jobs)
+ cmdLine = f'cmake --build . --config {build_type}'
+ if target:
+ cmdLine += f' --target {target}'
+ if platform.system() == 'Windows':
+ cmdLine += f' -- /nologo /verbosity:minimal /maxcpucount:{num_jobs}'
+ else:
+ cmdLine += f' -- -j{num_jobs}'
print(cmdLine)
res = subprocess.call(cmdLine, cwd=build_dir, shell=True)
return res == 0
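The quoting in the regenerated cmake command line is the trickiest part of this file: the generator name must end up inside literal double quotes, so the patch spells it as a triple-quoted f-string with a trailing escaped quote. A small sketch of why that spelling works; the generator value is just an example:

generator = 'Unix Makefiles'

# Triple-quoted f-string as in the patch: literal double quotes may appear
# directly in the text, and the final quote is escaped so it does not merge
# with the closing """ delimiter.
flag = f""" -G "{generator}\""""
assert flag == ' -G "Unix Makefiles"'

# The same result in the older str.format spelling that the patch replaces.
assert flag == ' -G "{}"'.format(generator)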
diff --git a/mod/tools/git.py b/mod/tools/git.py
index b75cf5bb..c649c899 100644
--- a/mod/tools/git.py
+++ b/mod/tools/git.py
@@ -33,7 +33,7 @@ def check_exists_with_error():
return False
#-------------------------------------------------------------------------------
-def get_url_with_personal_access_token(url) :
+def get_url_with_personal_access_token(url):
"""return the repo url for private repos using a github personal access token
handles both git@github.com and https://github.com urls
@@ -48,8 +48,9 @@ def get_url_with_personal_access_token(url) :
github_https_url = 'https://github.com/'
github_gitssh_url = 'git@github.com:'
- github_personal_access_token = os.environ.get('FIPS_GITHUB_PERSONAL_ACCESS_TOKEN', None)
- if github_personal_access_token:
+ if github_personal_access_token := os.environ.get(
+ 'FIPS_GITHUB_PERSONAL_ACCESS_TOKEN', None
+ ):
url_remainder_start = 0
if url.startswith(github_https_url):
url_remainder_start = len(github_https_url)
@@ -57,12 +58,12 @@ def get_url_with_personal_access_token(url) :
url_remainder_start = len(github_gitssh_url)
if url_remainder_start > 0:
- url = 'https://' + github_personal_access_token + '@github.com/' + url[url_remainder_start:]
-
+ url = f'https://{github_personal_access_token}@github.com/{url[url_remainder_start:]}'
+
return url
#-------------------------------------------------------------------------------
-def clone(url, branch, depth, name, cwd) :
+def clone(url, branch, depth, name, cwd):
"""git clone a remote git repo
:param url: the git url to clone from
@@ -74,13 +75,13 @@ def clone(url, branch, depth, name, cwd) :
"""
check_exists_with_error()
cmd = 'git clone --recursive'
- if branch :
- cmd += ' --branch {} --single-branch'.format(branch)
- if depth :
- cmd += ' --depth {}'.format(depth)
+ if branch:
+ cmd += f' --branch {branch} --single-branch'
+ if depth:
+ cmd += f' --depth {depth}'
url = get_url_with_personal_access_token(url)
- cmd += ' {} {}'.format(url, name)
+ cmd += f' {url} {name}'
res = subprocess.call(cmd, cwd=cwd, shell=True)
return res == 0
@@ -96,7 +97,7 @@ def add(proj_dir, update=False):
try:
subprocess.check_call('git add .', cwd=proj_dir, shell=True)
except subprocess.CalledProcessError as e:
- log.error("'git add .' failed with '{}'".format(e.returncode))
+ log.error(f"'git add .' failed with '{e.returncode}'")
#-------------------------------------------------------------------------------
def commit(proj_dir, msg):
@@ -106,9 +107,9 @@ def commit(proj_dir, msg):
"""
check_exists_with_error()
try:
- subprocess.check_call('git commit -m "{}"'.format(msg), cwd=proj_dir, shell=True)
+ subprocess.check_call(f'git commit -m "{msg}"', cwd=proj_dir, shell=True)
except subprocess.CalledProcessError as e:
- log.error("'git commit' failed with '{}'".format(e.returncode))
+ log.error(f"'git commit' failed with '{e.returncode}'")
#-------------------------------------------------------------------------------
def commit_allow_empty(proj_dir, msg):
@@ -119,9 +120,11 @@ def commit_allow_empty(proj_dir, msg):
"""
check_exists_with_error()
try:
- subprocess.check_call('git commit --allow-empty -m "{}"'.format(msg), cwd=proj_dir, shell=True)
+ subprocess.check_call(
+ f'git commit --allow-empty -m "{msg}"', cwd=proj_dir, shell=True
+ )
except subprocess.CalledProcessError as e:
- log.error("'git commit' failed with '{}'".format(e.returncode))
+ log.error(f"'git commit' failed with '{e.returncode}'")
#-------------------------------------------------------------------------------
def push(proj_dir):
@@ -133,16 +136,16 @@ def push(proj_dir):
try:
res = subprocess.check_call('git push', cwd=proj_dir, shell=True)
except subprocess.CalledProcessError as e:
- log.error("'git push' failed with '{}'".format(e.returncode))
+ log.error(f"'git push' failed with '{e.returncode}'")
#-------------------------------------------------------------------------------
def has_local_changes(proj_dir):
"""checks if a git repo has uncommitted or unpushed changes (basically
anything which would make a git pull unsafe"""
check_exists_with_error()
- output = subprocess.check_output('git status --porcelain',
- cwd=proj_dir, shell=True).decode("utf-8")
- if output:
+ if output := subprocess.check_output(
+ 'git status --porcelain', cwd=proj_dir, shell=True
+ ).decode("utf-8"):
return True
# get current branch name and tracked remote if exists, this has
# either the form:
@@ -159,9 +162,11 @@ def has_local_changes(proj_dir):
else:
cur_branch = cur_status
cur_remote = ''
- output = subprocess.check_output('git log {}..{} --oneline'.format(cur_remote, cur_branch),
- cwd=proj_dir, shell=True).decode("utf-8")
- if output:
+ if output := subprocess.check_output(
+ f'git log {cur_remote}..{cur_branch} --oneline',
+ cwd=proj_dir,
+ shell=True,
+ ).decode("utf-8"):
return True
#-------------------------------------------------------------------------------
@@ -192,7 +197,7 @@ def update(proj_dir):
update_submodule(proj_dir)
return True
else:
- log.warn('skipping {}, uncommitted or unpushed changes!'.format(proj_dir))
+ log.warn(f'skipping {proj_dir}, uncommitted or unpushed changes!')
return False
#-------------------------------------------------------------------------------
@@ -218,15 +223,17 @@ def get_branches(proj_dir) :
return branches;
#-------------------------------------------------------------------------------
-def checkout(proj_dir, revision) :
+def checkout(proj_dir, revision):
"""checkout a specific revision hash of a repository
:param proj_dir: a git repo dir
:param revision: SHA1 hash of the commit
:returns: True if git returns successful
"""
- try :
- output = subprocess.check_output('git checkout {}'.format(revision), cwd=proj_dir, shell=True).decode("utf-8")
+ try:
+ output = subprocess.check_output(
+ f'git checkout {revision}', cwd=proj_dir, shell=True
+ ).decode("utf-8")
update_submodule(proj_dir)
return output.split(':')[0] != 'error'
except subprocess.CalledProcessError :
@@ -234,24 +241,21 @@ def checkout(proj_dir, revision) :
return None
#-------------------------------------------------------------------------------
-def has_uncommitted_files(proj_dir) :
+def has_uncommitted_files(proj_dir):
"""check whether a git repo has uncommitted files
:param proj_dir: a git repo dir
:returns: True/False and output string
"""
- try :
+ try:
output = subprocess.check_output('git status -s', cwd=proj_dir, shell=True).decode("utf-8")
- if len(output) > 0 :
- return True, output
- else :
- return False, output
+ return (True, output) if len(output) > 0 else (False, output)
except subprocess.CalledProcessError :
log.error("failed to call 'git status -s'")
return False, ''
#-------------------------------------------------------------------------------
-def get_remote_rev(proj_dir, remote_branch) :
+def get_remote_rev(proj_dir, remote_branch):
"""get the head rev of a remote branch
:param proj_dir: a git repo dir
@@ -259,34 +263,35 @@ def get_remote_rev(proj_dir, remote_branch) :
:returns: the revision string of the remote branch head or None
"""
tokens = remote_branch.split('/')
- try :
- output = subprocess.check_output('git ls-remote {} {}'.format(tokens[0], tokens[1]), cwd=proj_dir, shell=True).decode("utf-8")
+ try:
+ output = subprocess.check_output(
+ f'git ls-remote {tokens[0]} {tokens[1]}', cwd=proj_dir, shell=True
+ ).decode("utf-8")
# can return an empty string if the remote branch doesn't exist
- if output != '':
- return output.split()[0]
- else :
- return None
+ return output.split()[0] if output != '' else None
except subprocess.CalledProcessError :
log.error("failed to call 'git ls-remote'")
return None
#-------------------------------------------------------------------------------
-def get_local_rev(proj_dir, local_branch) :
+def get_local_rev(proj_dir, local_branch):
"""get the head rev of a local branch
:param proj_dir: a git repo dir
:param local_branch: local branch name (e.g. master)
:returns: the revision string of the local branch head or None
"""
- try :
- output = subprocess.check_output('git rev-parse {}'.format(local_branch), cwd=proj_dir, shell=True).decode("utf-8")
+ try:
+ output = subprocess.check_output(
+ f'git rev-parse {local_branch}', cwd=proj_dir, shell=True
+ ).decode("utf-8")
return output.rstrip()
except subprocess.CalledProcessError :
log.error("failed to call 'git rev-parse'")
return None
#-------------------------------------------------------------------------------
-def check_out_of_sync(proj_dir) :
+def check_out_of_sync(proj_dir):
"""check through all branches of the git repo in proj_dir and
returns an array of all branches that are out-of-sync with their
remote branches (either have unpushed local changes, or un-pulled
@@ -301,47 +306,43 @@ def check_out_of_sync(proj_dir) :
# first check whether there are uncommitted changes
status, status_output = has_uncommitted_files(proj_dir)
- if status :
+ if status:
out_of_sync = True
- log.warn("'{}' has uncommitted changes:".format(proj_dir))
+ log.warn(f"'{proj_dir}' has uncommitted changes:")
log.info(status_output)
# check whether local and remote branch are out of sync
branches_out_of_sync = False
branches = get_branches(proj_dir)
- if not branches :
- log.warn("'{}' no remote branches found".format(proj_dir))
- for local_branch in branches :
+ if not branches:
+ log.warn(f"'{proj_dir}' no remote branches found")
+ for local_branch in branches:
remote_branch = branches[local_branch]
- remote_rev = get_remote_rev(proj_dir, remote_branch)
-
- # remote_rev can be None if the remote branch doesn't exists,
- # this is not an error
- if remote_rev :
+ if remote_rev := get_remote_rev(proj_dir, remote_branch):
local_rev = get_local_rev(proj_dir, local_branch)
- if remote_rev != local_rev :
+ if remote_rev != local_rev:
out_of_sync = True
if not branches_out_of_sync:
# only show this once
- log.warn("'{}' branches out of sync:".format(proj_dir))
+ log.warn(f"'{proj_dir}' branches out of sync:")
branches_out_of_sync = True
- log.info(" {}: {}".format(local_branch, local_rev))
- log.info(" {}: {}".format(remote_branch, remote_rev))
-
+ log.info(f" {local_branch}: {local_rev}")
+ log.info(f" {remote_branch}: {remote_rev}")
+
return out_of_sync
#-------------------------------------------------------------------------------
-def check_branch_out_of_sync(proj_dir, branch) :
+def check_branch_out_of_sync(proj_dir, branch):
"""check if a single branch is out of sync with remote repo"""
check_exists_with_error()
out_of_sync = False
remote_branches = get_branches(proj_dir)
local_rev = get_local_rev(proj_dir, branch)
- if branch in remote_branches :
+ if branch in remote_branches:
remote_rev = get_remote_rev(proj_dir, remote_branches[branch])
out_of_sync = remote_rev != local_rev
- else :
- log.warn("'{}' no remote branch found for '{}'".format(proj_dir, branch))
+ else:
+ log.warn(f"'{proj_dir}' no remote branch found for '{branch}'")
return out_of_sync
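The token-injection rewrite above turns both GitHub URL styles into an authenticated HTTPS URL. A self-contained sketch of the same transformation, with the environment lookup inlined and an obviously fake token; the helper name is made up for the example:

def with_token(url, token):
    # mirror of the patched logic: only rewrite the known GitHub prefixes
    https_prefix = 'https://github.com/'
    ssh_prefix = 'git@github.com:'
    for prefix in (https_prefix, ssh_prefix):
        if url.startswith(prefix):
            return f'https://{token}@github.com/{url[len(prefix):]}'
    return url

assert with_token('https://github.com/floooh/fips.git', 'TOKEN') == \
    'https://TOKEN@github.com/floooh/fips.git'
assert with_token('git@github.com:floooh/fips.git', 'TOKEN') == \
    'https://TOKEN@github.com/floooh/fips.git'
# non-GitHub URLs pass through unchanged
assert with_token('https://example.org/repo.git', 'TOKEN') == \
    'https://example.org/repo.git'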
diff --git a/mod/tools/java.py b/mod/tools/java.py
index cb8e0ca2..e29f6946 100644
--- a/mod/tools/java.py
+++ b/mod/tools/java.py
@@ -8,7 +8,7 @@
not_found = "version 8 required for Android development, installed with the Java JDK"
#------------------------------------------------------------------------------
-def check_exists(fips_dir) :
+def check_exists(fips_dir):
try :
res = subprocess.check_output(['java', '-version'],
stderr=subprocess.STDOUT,
@@ -16,7 +16,5 @@ def check_exists(fips_dir) :
except (OSError, subprocess.CalledProcessError) :
return False
ver = re.search("version \"([^\\s]+)\"", res)
- if not ver or not ver.group(1).startswith('1.8') :
- return False
- return True
+ return bool(ver and ver.group(1).startswith('1.8'))
diff --git a/mod/tools/javac.py b/mod/tools/javac.py
index b531f702..170cd771 100644
--- a/mod/tools/javac.py
+++ b/mod/tools/javac.py
@@ -8,7 +8,7 @@
not_found = "version 8 required for Android development, installed with the Java JDK"
#------------------------------------------------------------------------------
-def check_exists(fips_dir) :
+def check_exists(fips_dir):
try :
res = subprocess.check_output(['javac', '-version'],
stderr=subprocess.STDOUT,
@@ -16,6 +16,4 @@ def check_exists(fips_dir) :
except (OSError, subprocess.CalledProcessError) :
return False
ver = re.search("javac ([^\\s]+)", res)
- if not ver or not ver.group(1).startswith('1.8') :
- return False
- return True
+ return bool(ver and ver.group(1).startswith('1.8'))
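java.py and javac.py now end with the same one-liner: converting the short-circuiting `and` chain to a real bool so the function returns True/False instead of a Match object or None. A small sketch against example version strings (the sample outputs are illustrative, not captured from a real JDK):

import re

def looks_like_jdk8(version_output):
    # same check as the patched javac.py, applied to a captured string
    ver = re.search("javac ([^\\s]+)", version_output)
    return bool(ver and ver.group(1).startswith('1.8'))

assert looks_like_jdk8('javac 1.8.0_292') is True      # example JDK 8 output
assert looks_like_jdk8('javac 11.0.2') is False        # example JDK 11 output
assert looks_like_jdk8('command not found') is False   # no match at all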
diff --git a/mod/tools/make.py b/mod/tools/make.py
index 475341f4..ab853ba1 100644
--- a/mod/tools/make.py
+++ b/mod/tools/make.py
@@ -21,7 +21,7 @@ def check_exists(fips_dir) :
return False
#-------------------------------------------------------------------------------
-def run_build(fips_dir, target, build_dir, num_jobs=1) :
+def run_build(fips_dir, target, build_dir, num_jobs=1):
"""make a build target
:param target: name of build target, or None
@@ -29,9 +29,9 @@ def run_build(fips_dir, target, build_dir, num_jobs=1) :
:param num_jobs: number of jobs, default is 1
:returns: True if build was successful
"""
- cmdLine = 'make -j{}'.format(num_jobs)
- if target is not None :
- cmdLine += ' ' + target;
+ cmdLine = f'make -j{num_jobs}'
+ if target is not None:
+ cmdLine += f' {target}';
print(cmdLine)
res = subprocess.call(cmdLine, cwd=build_dir, shell=True)
return res == 0
diff --git a/mod/tools/ninja.py b/mod/tools/ninja.py
index 772f748a..ad70ad8f 100644
--- a/mod/tools/ninja.py
+++ b/mod/tools/ninja.py
@@ -9,19 +9,16 @@
not_found = "required for building '*-ninja-*' configs"
#-------------------------------------------------------------------------------
-def get_ninja_name() :
- if platform.system() == 'Windows' :
- return 'ninja.exe'
- else :
- return 'ninja'
+def get_ninja_name():
+ return 'ninja.exe' if platform.system() == 'Windows' else 'ninja'
#-------------------------------------------------------------------------------
-def get_ninja_tool(fips_dir) :
+def get_ninja_tool(fips_dir):
"""get the ninja tool exe"""
- if platform.system() == 'Windows' :
+ if platform.system() == 'Windows':
# on Windows, use the precompiled ninja.exe coming with fips
- return fips_dir + '/tools/win32/' + get_ninja_name()
- else :
+ return f'{fips_dir}/tools/win32/{get_ninja_name()}'
+ else:
# everywhere else, expect it in the path
return get_ninja_name()
@@ -34,19 +31,19 @@ def prepare_ninja_tool(fips_dir, build_dir) :
shutil.copy(get_ninja_tool(fips_dir), build_dir)
#------------------------------------------------------------------------------
-def check_exists(fips_dir) :
+def check_exists(fips_dir):
"""test if ninja is in the path
:returns: True if ninja is in the path
"""
try:
- out = subprocess.check_output(['{}'.format(get_ninja_tool(fips_dir)), '--version'])
+ out = subprocess.check_output([f'{get_ninja_tool(fips_dir)}', '--version'])
return True
except (OSError, subprocess.CalledProcessError):
return False;
#-------------------------------------------------------------------------------
-def run_build(fips_dir, target, build_dir, num_jobs=1) :
+def run_build(fips_dir, target, build_dir, num_jobs=1):
"""build a target
:param target: name of build target, of None
@@ -57,21 +54,21 @@ def run_build(fips_dir, target, build_dir, num_jobs=1) :
if not target :
target = 'all'
prepare_ninja_tool(fips_dir, build_dir)
- cmdLine = "{} -j {} {}".format(get_ninja_name(), num_jobs, target)
+ cmdLine = f"{get_ninja_name()} -j {num_jobs} {target}"
print(cmdLine)
res = subprocess.call(cmdLine, cwd=build_dir, shell=True)
return res == 0
#-------------------------------------------------------------------------------
-def run_clean(fips_dir, build_dir) :
+def run_clean(fips_dir, build_dir):
"""run the special 'clean' target
:param build_dir: directory of the build.ninja file
:returns: True if ninja returned without error
"""
prepare_ninja_tool(fips_dir, build_dir)
- try :
- cmdLine = '{} clean'.format(get_ninja_name())
+ try:
+ cmdLine = f'{get_ninja_name()} clean'
res = subprocess.call(cmdLine, cwd=build_dir, shell=True)
return res == 0
except (OSError, subprocess.CalledProcessError) :
diff --git a/mod/tools/vscode.py b/mod/tools/vscode.py
index 16135d0f..92b18d62 100644
--- a/mod/tools/vscode.py
+++ b/mod/tools/vscode.py
@@ -26,7 +26,11 @@ def check_exists(fips_dir) :
def run(proj_dir):
try:
proj_name = util.get_project_name_from_dir(proj_dir)
- subprocess.call('code .vscode/{}.code-workspace'.format(proj_name), cwd=proj_dir, shell=True)
+ subprocess.call(
+ f'code .vscode/{proj_name}.code-workspace',
+ cwd=proj_dir,
+ shell=True,
+ )
except OSError:
log.error("Failed to run Visual Studio Code as 'code'")
@@ -38,14 +42,13 @@ def read_cmake_targets(fips_dir, proj_dir, cfg, types):
'''
success, targets = util.get_cfg_target_list(fips_dir, proj_dir, cfg)
if success:
- if types:
- matching_targets = [tgt for tgt in targets if targets[tgt] in types]
- else:
- matching_targets = targets.keys()
- return matching_targets
- else:
- log.error('Failed to read fips_targets.yml from build dir')
- return None
+ return (
+ [tgt for tgt in targets if targets[tgt] in types]
+ if types
+ else targets.keys()
+ )
+ log.error('Failed to read fips_targets.yml from build dir')
+ return None
#------------------------------------------------------------------------------
def read_cmake_headerdirs(fips_dir, proj_dir, cfg):
@@ -73,13 +76,13 @@ def read_cmake_defines(fips_dir, proj_dir, cfg):
for define in val:
if define not in result:
result.append(define)
- log.info(' {}'.format(define))
+ log.info(f' {define}')
if 'vscode_additional_defines' in cfg:
log.info(' defines from build config (vscode_additional_defines):')
for define in cfg['vscode_additional_defines']:
if define not in result:
result.append(define)
- log.info(' {}'.format(define))
+ log.info(f' {define}')
else:
log.info(' no additional defines from build config (vscode_additional_defines)')
return result
@@ -149,13 +152,10 @@ def get_vs_header_paths(fips_dir, proj_dir, cfg):
# Windows system headers are in 2 locations, first find the latest Windows Kit
result = []
- kits = glob.glob('C:/Program Files (x86)/Windows Kits/10/Include/*/')
- if kits:
+ if kits := glob.glob('C:/Program Files (x86)/Windows Kits/10/Include/*/'):
latest = max(kits).replace('\\','/')
- subdirs = glob.glob(latest + '/*/')
- for d in subdirs:
- result.append(d.replace('\\','/'))
-
+ subdirs = glob.glob(f'{latest}/*/')
+ result.extend(d.replace('\\','/') for d in subdirs)
# next get the used active Visual Studio instance from the cmake cache
proj_name = util.get_project_name_from_dir(proj_dir)
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
@@ -164,7 +164,7 @@ def get_vs_header_paths(fips_dir, proj_dir, cfg):
if line.startswith('CMAKE_LINKER:FILEPATH='):
bin_index = line.find('/bin/')
if bin_index > 0:
- result.append(line[22:bin_index+1]+'include')
+ result.append(f'{line[22:bin_index + 1]}include')
return result
#------------------------------------------------------------------------------
@@ -193,12 +193,14 @@ def write_tasks_json(fips_dir, proj_dir, vscode_dir, cfg):
}
# write the actual tasks
for tgt in all_targets:
- tasks['tasks'].append({
- 'label': tgt,
- 'command': 'python fips make {}'.format(tgt),
- 'group': 'build',
- 'problemMatcher': [ problem_matcher() ],
- })
+ tasks['tasks'].append(
+ {
+ 'label': tgt,
+ 'command': f'python fips make {tgt}',
+ 'group': 'build',
+ 'problemMatcher': [problem_matcher()],
+ }
+ )
tasks['tasks'].append({
'label': 'ALL',
'command': 'python fips build',
@@ -208,7 +210,7 @@ def write_tasks_json(fips_dir, proj_dir, vscode_dir, cfg):
},
'problemMatcher': [ problem_matcher() ],
})
- task_path = vscode_dir + '/tasks.json'
+ task_path = f'{vscode_dir}/tasks.json'
with open(task_path, 'w') as f:
json.dump(tasks, f, indent=1, separators=(',',':'))
@@ -230,11 +232,11 @@ def write_launch_json(fips_dir, proj_dir, vscode_dir, cfg):
for tgt in exe_targets:
for pre_launch_build in pre_launch_build_options:
for stop_at_entry in stop_at_entry_options:
- path = deploy_dir + '/' + tgt
+ path = f'{deploy_dir}/{tgt}'
if util.get_host_platform() == 'win':
path += '.exe'
cwd = os.path.dirname(path)
- osx_path = path + '.app/Contents/MacOS/' + tgt
+ osx_path = f'{path}.app/Contents/MacOS/{tgt}'
osx_cwd = os.path.dirname(osx_path)
if os.path.isdir(osx_cwd):
path = osx_path
@@ -287,39 +289,39 @@ def write_launch_json(fips_dir, proj_dir, vscode_dir, cfg):
'request': 'launch',
'stopOnEntry': True,
'pythonPath': '${config:python.pythonPath}',
- 'program': build_dir + '/fips-gen.py',
- 'args': [ build_dir + '/fips_codegen.yml' ],
+ 'program': f'{build_dir}/fips-gen.py',
+ 'args': [f'{build_dir}/fips_codegen.yml'],
"cwd": proj_dir,
"debugOptions": [
"WaitOnAbnormalExit",
"WaitOnNormalExit",
- "RedirectOutput"
- ]
+ "RedirectOutput",
+ ],
}
launch['configurations'].append(c)
# add a python debug config for each fips verb
- for verb_name, verb_mod in verb.verbs.items() :
+ for verb_name, verb_mod in verb.verbs.items():
# ignore standard verbs
if fips_dir not in inspect.getfile(verb_mod):
c = {
- 'name': 'fips {}'.format(verb_name),
+ 'name': f'fips {verb_name}',
'type': 'python',
'request': 'launch',
'stopOnEntry': True,
'pythonPath': '${config:python.pythonPath}',
- 'program': proj_dir + '/fips',
- 'args': [ verb_name ],
+ 'program': f'{proj_dir}/fips',
+ 'args': [verb_name],
'cwd': proj_dir,
"debugOptions": [
"WaitOnAbnormalExit",
"WaitOnNormalExit",
- "RedirectOutput"
- ]
+ "RedirectOutput",
+ ],
}
launch['configurations'].append(c)
- launch_path = vscode_dir + '/launch.json'
- log.info(' writing {}'.format(launch_path))
+ launch_path = f'{vscode_dir}/launch.json'
+ log.info(f' writing {launch_path}')
with open(launch_path, 'w') as f:
json.dump(launch, f, indent=1, separators=(',',':'))
@@ -331,7 +333,7 @@ def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
proj_name = util.get_project_name_from_dir(proj_dir)
build_dir = util.get_build_dir(fips_dir, proj_name, cfg['name'])
defines = read_cmake_defines(fips_dir, proj_dir, cfg)
- compile_commands_path = build_dir + '/compile_commands.json'
+ compile_commands_path = f'{build_dir}/compile_commands.json'
has_compile_command_json = os.path.isfile(compile_commands_path)
inc_paths = None
if not has_compile_command_json:
@@ -345,8 +347,8 @@ def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
'name': config_name,
'browse': {
'limitSymbolsToIncludeHeaders': True,
- 'databaseFilename': '{}/browse.VS.code'.format(build_dir)
- }
+ 'databaseFilename': f'{build_dir}/browse.VS.code',
+ },
}
config_incl_paths = None
compiler_path = None
@@ -369,7 +371,7 @@ def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
if inc_paths:
config_incl_paths.extend(inc_paths)
config_defines.extend(defines)
-
+
if compiler_path:
c['compilerPath'] = compiler_path
if has_compile_command_json:
@@ -381,15 +383,15 @@ def write_c_cpp_properties_json(fips_dir, proj_dir, impex, cfg):
c['browse']['path'] = config_incl_paths
c['intelliSenseMode'] = intellisense_mode
props['configurations'].append(c)
-
+
# add dependencies in reverse order, so that main project is first
for dep_proj_name in reversed(impex):
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
- vscode_dir = dep_proj_dir + '/.vscode'
+ vscode_dir = f'{dep_proj_dir}/.vscode'
if not os.path.isdir(vscode_dir):
os.makedirs(vscode_dir)
- prop_path = vscode_dir + '/c_cpp_properties.json'
- log.info(' writing {}'.format(prop_path))
+ prop_path = f'{vscode_dir}/c_cpp_properties.json'
+ log.info(f' writing {prop_path}')
with open(prop_path, 'w') as f:
json.dump(props, f, indent=1, separators=(',',':'))
@@ -404,15 +406,15 @@ def write_cmake_tools_settings(fips_dir, proj_dir, vscode_dir, cfg):
'FIPS_CONFIG:': cfg['name']
}
}
- settings_path = vscode_dir + '/settings.json'
- log.info(' writing {}'.format(settings_path))
+ settings_path = f'{vscode_dir}/settings.json'
+ log.info(f' writing {settings_path}')
with open(settings_path, 'w') as f:
json.dump(settings, f, indent=1, separators=(',',':'))
#-------------------------------------------------------------------------------
def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
'''write a multiroot-workspace config file'''
- vscode_dir = proj_dir + '/.vscode'
+ vscode_dir = f'{proj_dir}/.vscode'
ws = {
'folders': [],
'settings': {}
@@ -422,8 +424,8 @@ def write_code_workspace_file(fips_dir, proj_dir, impex, cfg):
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
ws['folders'].append({ 'path': dep_proj_dir })
proj_name = util.get_project_name_from_dir(proj_dir)
- ws_path = '{}/{}.code-workspace'.format(vscode_dir, proj_name)
- log.info(' writing {}'.format(ws_path))
+ ws_path = f'{vscode_dir}/{proj_name}.code-workspace'
+ log.info(f' writing {ws_path}')
with open(ws_path, 'w') as f:
json.dump(ws, f, indent=1, separators=(',',':'))
@@ -434,13 +436,13 @@ def remove_vscode_tasks_launch_files(fips_dir, proj_dir, impex, cfg):
'''
for dep_proj_name in reversed(impex):
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
- tasks_path = dep_proj_dir + '/.vscode/tasks.json'
- launch_path = dep_proj_dir + '/.vscode/launch.json'
+ tasks_path = f'{dep_proj_dir}/.vscode/tasks.json'
+ launch_path = f'{dep_proj_dir}/.vscode/launch.json'
if os.path.exists(tasks_path):
- log.info(' deleting {}'.format(tasks_path))
+ log.info(f' deleting {tasks_path}')
os.remove(tasks_path)
if os.path.exists(launch_path):
- log.info(' deleting {}'.format(launch_path))
+ log.info(f' deleting {launch_path}')
os.remove(launch_path)
#-------------------------------------------------------------------------------
@@ -449,7 +451,7 @@ def write_workspace_settings(fips_dir, proj_dir, cfg):
c_cpp_properties.json files from cmake output files
'''
log.info("=== writing Visual Studio Code config files...")
- vscode_dir = proj_dir + '/.vscode'
+ vscode_dir = f'{proj_dir}/.vscode'
if not os.path.isdir(vscode_dir):
os.makedirs(vscode_dir)
# fetch all project dependencies
@@ -474,18 +476,20 @@ def cleanup(fips_dir, proj_dir):
success, impex = dep.get_all_imports_exports(fips_dir, proj_dir)
if not success :
log.warn("missing import project directories, please run 'fips fetch'")
- log.info(log.RED + 'Please confirm to delete the following directories:' + log.DEF)
+ log.info(
+ f'{log.RED}Please confirm to delete the following directories:{log.DEF}'
+ )
for dep_proj_name in reversed(impex):
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
- vscode_dir = dep_proj_dir + '/.vscode/'
+ vscode_dir = f'{dep_proj_dir}/.vscode/'
if os.path.isdir(vscode_dir):
- log.info(' {}'.format(vscode_dir))
- if util.confirm(log.RED + 'Delete those directories?' + log.DEF):
+ log.info(f' {vscode_dir}')
+ if util.confirm(f'{log.RED}Delete those directories?{log.DEF}'):
for dep_proj_name in reversed(impex):
dep_proj_dir = util.get_project_dir(fips_dir, dep_proj_name)
- vscode_dir = dep_proj_dir + '/.vscode/'
+ vscode_dir = f'{dep_proj_dir}/.vscode/'
if os.path.isdir(vscode_dir):
- log.info(' deleting {}'.format(vscode_dir))
+ log.info(f' deleting {vscode_dir}')
shutil.rmtree(vscode_dir)
log.info('Done.')
else:
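One subtlety in the read_cmake_targets rewrite: when no type filter is given it returns `targets.keys()`, which is a dict view rather than a list. As long as the result is only iterated, the two forms behave the same, as this sketch illustrates (target names and types are made up):

targets = {'hello': 'app', 'zlib': 'lib'}   # example fips_targets.yml content
types = None                                # example: caller wants all targets

selected = (
    [tgt for tgt in targets if targets[tgt] in types]
    if types
    else targets.keys()
)

# A dict view, not a list -- fine for iteration, but it is a live view of
# the dict rather than an independent copy.
assert not isinstance(selected, list)
assert list(selected) == ['hello', 'zlib']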
diff --git a/mod/tools/xcodebuild.py b/mod/tools/xcodebuild.py
index cc12cd7d..420a2f79 100644
--- a/mod/tools/xcodebuild.py
+++ b/mod/tools/xcodebuild.py
@@ -19,7 +19,7 @@ def check_exists(fips_dir) :
return False
#------------------------------------------------------------------------------
-def run_build(fips_dir, target, build_type, build_dir, num_jobs=1) :
+def run_build(fips_dir, target, build_type, build_dir, num_jobs=1):
"""build a target
:param target: name of build target, or None
@@ -30,7 +30,7 @@ def run_build(fips_dir, target, build_type, build_dir, num_jobs=1) :
"""
if not target :
target = "ALL_BUILD"
- cmdLine = 'xcodebuild -jobs {} -configuration {} -target {}'.format(num_jobs, build_type, target)
+ cmdLine = f'xcodebuild -jobs {num_jobs} -configuration {build_type} -target {target}'
print(cmdLine)
res = subprocess.call(cmdLine, cwd=build_dir, shell=True)
return res == 0
diff --git a/mod/util.py b/mod/util.py
index a6a9057c..29e25eef 100644
--- a/mod/util.py
+++ b/mod/util.py
@@ -32,17 +32,17 @@ def get_workspace_dir(fips_dir) :
return os.path.split(fips_dir)[0]
#-------------------------------------------------------------------------------
-def get_project_dir(fips_dir, proj_name) :
+def get_project_dir(fips_dir, proj_name):
"""get absolute path to project directory in same workspace as fips
:param fips_dir: absolute path of fips
:param proj_name: project name
:returns: absolute path to project in same directory as fips
"""
- return get_workspace_dir(fips_dir) + '/' + proj_name
+ return f'{get_workspace_dir(fips_dir)}/{proj_name}'
#-------------------------------------------------------------------------------
-def get_build_dir(fips_dir, proj_name, cfg) :
+def get_build_dir(fips_dir, proj_name, cfg):
"""get absolute path to build directory in same workspace as fips for
given configuration
@@ -52,10 +52,10 @@ def get_build_dir(fips_dir, proj_name, cfg) :
:returns: absolute path of build directory
"""
cfg_name = cfg if type(cfg) == str else cfg['name']
- return '{}/fips-build/{}/{}'.format(get_workspace_dir(fips_dir), proj_name, cfg_name)
+ return f'{get_workspace_dir(fips_dir)}/fips-build/{proj_name}/{cfg_name}'
#-------------------------------------------------------------------------------
-def get_deploy_dir(fips_dir, proj_name, cfg) :
+def get_deploy_dir(fips_dir, proj_name, cfg):
"""get absolute path to deploy directory in same workspace as fips
:param fips_dir: absolute path of fips
@@ -64,7 +64,7 @@ def get_deploy_dir(fips_dir, proj_name, cfg) :
:returns: absolute path of deploy directory
"""
cfg_name = cfg if type(cfg) == str else cfg['name']
- return '{}/fips-deploy/{}/{}'.format(get_workspace_dir(fips_dir), proj_name, cfg_name)
+ return f'{get_workspace_dir(fips_dir)}/fips-deploy/{proj_name}/{cfg_name}'
#-------------------------------------------------------------------------------
def get_fips_dir(proj_dir, name):
@@ -80,8 +80,8 @@ def get_fips_dir(proj_dir, name):
:param proj_dir: absolute path of project directory
:name: the name without the 'fips-' prefix
"""
- d0 = proj_dir + '/fips-' + name
- d1 = proj_dir + '/fips-files/' + name
+ d0 = f'{proj_dir}/fips-{name}'
+ d1 = f'{proj_dir}/fips-files/{name}'
if os.path.isdir(d0):
return d0
elif os.path.isdir(d1):
@@ -140,7 +140,7 @@ def get_giturl_from_url(url) :
return url.split('#')[0]
#-------------------------------------------------------------------------------
-def get_gitbranch_from_url(url) :
+def get_gitbranch_from_url(url):
"""extracts the branch name from an url string
(after the optional '#'), returns 'master' if no branch name
specified.
@@ -148,10 +148,7 @@ def get_gitbranch_from_url(url) :
:param url: an url string, with optional '#' branch name appended
:returns: the extracted branch name, or 'master'
"""
- if '#' in url :
- return url.split('#')[1]
- else :
- return 'master'
+ return url.split('#')[1] if '#' in url else 'master'
#-------------------------------------------------------------------------------
def get_project_name_from_url(url) :
@@ -172,14 +169,14 @@ def get_project_name_from_dir(proj_dir) :
return os.path.split(proj_dir)[1]
#-------------------------------------------------------------------------------
-def load_fips_yml(proj_dir) :
+def load_fips_yml(proj_dir):
"""load the fips.yml file from project directory
:param proj_dir: absolute project directory
:returns: dictionary object
"""
dic = None
- path = proj_dir + '/fips.yml'
+ path = f'{proj_dir}/fips.yml'
if os.path.isfile(path) :
with open(path, 'r') as f:
dic = yaml.load(f)
@@ -188,7 +185,7 @@ def load_fips_yml(proj_dir) :
return dic
#-------------------------------------------------------------------------------
-def lookup_target_cwd(proj_dir, target) :
+def lookup_target_cwd(proj_dir, target):
"""lookup optional working directory for target from fips.yml,
return None if no cwd has been specified for this target in fips.yml
@@ -198,35 +195,32 @@ def lookup_target_cwd(proj_dir, target) :
"""
target_cwd = None
dic = load_fips_yml(proj_dir)
- if 'run' in dic :
- if target in dic['run'] :
- if 'cwd' in dic['run'][target] :
- target_cwd = proj_dir + '/' + dic['run'][target]['cwd']
+ if 'run' in dic:
+ if target in dic['run']:
+ if 'cwd' in dic['run'][target]:
+ target_cwd = f"{proj_dir}/{dic['run'][target]['cwd']}"
return target_cwd
#-------------------------------------------------------------------------------
-def is_valid_project_dir(proj_dir) :
+def is_valid_project_dir(proj_dir):
"""test if the provided directory is a valid fips project (has a
fips.yml file)
:param proj_dir: absolute project directory to check
:returns: True if a valid fips project
"""
- if os.path.isdir(proj_dir) :
- if not os.path.isfile(proj_dir + '/fips.yml') :
- return False
- return True
- else :
+ if not os.path.isdir(proj_dir):
return False
+ return os.path.isfile(f'{proj_dir}/fips.yml')
#-------------------------------------------------------------------------------
-def ensure_valid_project_dir(proj_dir) :
+def ensure_valid_project_dir(proj_dir):
"""test if project dir is valid, if not, dump error and abort
:param proj_dir: absolute project directory to check
"""
- if not is_valid_project_dir(proj_dir) :
- log.error("'{}' is not a valid project directory".format(proj_dir))
+ if not is_valid_project_dir(proj_dir):
+ log.error(f"'{proj_dir}' is not a valid project directory")
#-------------------------------------------------------------------------------
def is_git_url(url) :
@@ -240,15 +234,15 @@ def is_git_url(url) :
return url[-4:] == '.git'
#-------------------------------------------------------------------------------
-def confirm(question) :
+def confirm(question):
"""ask user to confirm (y/N)
:param question: the question to confirm
:return: True: user pressed 'y', False: user pressed 'n'
"""
validAnswers={'': False, 'yes': True, 'ye': True, 'y': True, 'no': False, 'n': False }
- while True :
- sys.stdout.write(question + ' [y/N]: ')
+ while True:
+ sys.stdout.write(f'{question} [y/N]: ')
choice = raw_input().lower()
if choice in validAnswers :
return validAnswers[choice]
@@ -256,10 +250,10 @@ def confirm(question) :
log.info("please respond with 'y', 'yes', 'n' or 'no'")
#-------------------------------------------------------------------------------
-def url_download_hook(count, block_size, total_size) :
+def url_download_hook(count, block_size, total_size):
"""a download progress hook for urllib"""
percent = int(count * block_size * 100 / total_size)
- sys.stdout.write('\r{}%'.format(percent))
+ sys.stdout.write(f'\r{percent}%')
#-------------------------------------------------------------------------------
def get_host_platform() :
@@ -276,7 +270,7 @@ def get_host_platform() :
def get_cfg_target_list(fips_dir, proj_dir, cfg):
proj_name = get_project_name_from_dir(proj_dir)
build_dir = get_build_dir(fips_dir, proj_name, cfg)
- targets_path = build_dir + '/fips_targets.yml'
+ targets_path = f'{build_dir}/fips_targets.yml'
if os.path.isfile(targets_path) :
targets = []
with open(targets_path) as f :
@@ -289,7 +283,7 @@ def get_cfg_target_list(fips_dir, proj_dir, cfg):
def get_cfg_headersdirs_by_target(fips_dir, proj_dir, cfg):
proj_name = get_project_name_from_dir(proj_dir)
build_dir = get_build_dir(fips_dir, proj_name, cfg)
- path = build_dir + '/fips_headerdirs.yml'
+ path = f'{build_dir}/fips_headerdirs.yml'
if os.path.isfile(path):
headerdirs = {}
with open(path) as f:
@@ -302,7 +296,7 @@ def get_cfg_headersdirs_by_target(fips_dir, proj_dir, cfg):
def get_cfg_defines_by_target(fips_dir, proj_dir, cfg):
proj_name = get_project_name_from_dir(proj_dir)
build_dir = get_build_dir(fips_dir, proj_name, cfg)
- path = build_dir + '/fips_defines.yml'
+ path = f'{build_dir}/fips_defines.yml'
if os.path.isfile(path):
defines = {}
with open(path) as f:
diff --git a/mod/verb.py b/mod/verb.py
index 7dcb2feb..2132511d 100644
--- a/mod/verb.py
+++ b/mod/verb.py
@@ -15,7 +15,7 @@
proj_verbs = OrderedDict()
#-------------------------------------------------------------------------------
-def import_verbs_from(proj_name, proj_dir, verb_dir) :
+def import_verbs_from(proj_name, proj_dir, verb_dir):
"""import all verb modules from a directory, populates the
verb and proj_verbs global variables
@@ -28,9 +28,7 @@ def import_verbs_from(proj_name, proj_dir, verb_dir) :
sys.path.insert(0, proj_dir)
if verb_dir and os.path.isdir(verb_dir):
- # get all .py file in verb dir
- verb_paths = glob.glob(verb_dir + '/*.py')
- if verb_paths :
+ if verb_paths := glob.glob(f'{verb_dir}/*.py'):
for verb_path in verb_paths :
verb_module_name = os.path.split(verb_path)[1]
verb_module_name = os.path.splitext(verb_module_name)[0]
@@ -43,7 +41,7 @@ def import_verbs_from(proj_name, proj_dir, verb_dir) :
proj_verbs[proj_name].append(verb_module_name)
#-------------------------------------------------------------------------------
-def import_verbs(fips_dir, proj_dir) :
+def import_verbs(fips_dir, proj_dir):
"""import verbs from local and imported projects, populates
the 'verbs' and 'proj_verbs' dictionaries
@@ -52,7 +50,7 @@ def import_verbs(fips_dir, proj_dir) :
"""
# first import verbs from fips directory
- import_verbs_from('fips', fips_dir, fips_dir + '/verbs')
+ import_verbs_from('fips', fips_dir, f'{fips_dir}/verbs')
# now go through all imported projects
if fips_dir != proj_dir :
diff --git a/templates/fips b/templates/fips
index 2bdc85cf..35df2baa 100644
--- a/templates/fips
+++ b/templates/fips
@@ -1,5 +1,6 @@
#!/usr/bin/env python
"""fips main entry"""
+
import os
import sys
import subprocess
@@ -10,15 +11,17 @@ fips_github_url = 'https://github.com/MODit3D/fips.git'
proj_path = os.path.dirname(os.path.abspath(__file__))
# use FIPS_ROOT if available in user's environment
fips_root = os.environ.get('FIPS_ROOT') or os.path.dirname(proj_path)
-fips_path = fips_root + '/fips'
+fips_path = f'{fips_root}/fips'
-if not os.path.isdir(fips_path) :
- print("\033[93m=== cloning fips build system to '{}':\033[0m".format(fips_path))
+if not os.path.isdir(fips_path):
+ print(f"\033[93m=== cloning fips build system to '{fips_path}':\033[0m")
subprocess.call(['git', 'clone', fips_github_url, fips_path])
sys.path.insert(0,fips_path)
-try :
+try:
from mod import fips
-except ImportError :
- print("\033[91m[ERROR]\033[0m failed to initialize fips build system in '{}'".format(proj_path))
+except ImportError:
+ print(
+ f"\033[91m[ERROR]\033[0m failed to initialize fips build system in '{proj_path}'"
+ )
sys.exit(10)
fips.run(fips_path, proj_path, sys.argv)
diff --git a/templates/fips-gen.py b/templates/fips-gen.py
index 26125275..df571699 100644
--- a/templates/fips-gen.py
+++ b/templates/fips-gen.py
@@ -20,20 +20,14 @@
from mod import log
import genutil
-def processFile(attrs) :
+def processFile(attrs):
# dynamically load (and execute) the generator module
absPyPath = attrs['generator']
input = attrs['in']
out_src = attrs['out_src']
out_hdr = attrs['out_hdr']
- if 'args' in attrs :
- args = attrs['args']
- else :
- args = None
- if 'env' in attrs :
- env = attrs['env']
- else :
- env = None
+ args = attrs['args'] if 'args' in attrs else None
+ env = attrs['env'] if 'env' in attrs else None
genutil.setEnv(env)
path, script = os.path.split(absPyPath)
sys.path.insert(0, path)
diff --git a/verbs/build.py b/verbs/build.py
index dca37896..d1a1f93e 100644
--- a/verbs/build.py
+++ b/verbs/build.py
@@ -7,13 +7,11 @@
from mod import log, util, project, settings
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""build fips project"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- cfg_name = None
- if len(args) > 0 :
- cfg_name = args[0]
+ cfg_name = args[0] if len(args) > 0 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
project.build(fips_dir, proj_dir, cfg_name)
diff --git a/verbs/clean.py b/verbs/clean.py
index 162955a3..53af25e6 100644
--- a/verbs/clean.py
+++ b/verbs/clean.py
@@ -8,13 +8,11 @@
from mod import log, util, settings, project
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""clean generated files"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- cfg_name = None
- if len(args) > 0 :
- cfg_name = args[0]
+ cfg_name = args[0] if len(args) > 0 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
if cfg_name == 'all' :
diff --git a/verbs/clion.py b/verbs/clion.py
index 2b660459..356bec40 100644
--- a/verbs/clion.py
+++ b/verbs/clion.py
@@ -6,14 +6,14 @@
from mod.tools import clion
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
if len(args) > 0:
if args[0] == 'clean':
clion.cleanup(fips_dir, proj_dir)
else:
- log.error("invalid noun '{}' (expected: clean)".format(noun))
+ log.error(f"invalid noun '{noun}' (expected: clean)")
#-------------------------------------------------------------------------------
def help():
diff --git a/verbs/clone.py b/verbs/clone.py
index e82fdb94..d42d3900 100644
--- a/verbs/clone.py
+++ b/verbs/clone.py
@@ -6,20 +6,20 @@
from mod import log, util, project, registry, dep
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""run the get verb"""
- if len(args) > 0 :
+ if len(args) > 0:
name = args[0]
-
+
# check project registry to resolve git url
- if registry.exists(fips_dir, name) :
+ if registry.exists(fips_dir, name):
url = registry.lookup_url(fips_dir, name)
- log.info("registry lookup: {} => {}".format(name, url))
- else :
+ log.info(f"registry lookup: {name} => {url}")
+ else:
url = name
- log.info("'{}' not in fips registry, trying as git url".format(url))
+ log.info(f"'{url}' not in fips registry, trying as git url")
project.clone(fips_dir, url)
- else :
+ else:
log.error("expected one arg [git-url]")
#-------------------------------------------------------------------------------
diff --git a/verbs/config.py b/verbs/config.py
index 32084f1d..de4c5ccc 100644
--- a/verbs/config.py
+++ b/verbs/config.py
@@ -7,13 +7,11 @@
from mod import log, util, project, settings
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""configure fips project"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- cfg_name = None
- if len(args) > 0 :
- cfg_name = args[0]
+ cfg_name = args[0] if len(args) > 0 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
project.configure(fips_dir, proj_dir, cfg_name)
diff --git a/verbs/diag.py b/verbs/diag.py
index da6135b8..d0c46486 100644
--- a/verbs/diag.py
+++ b/verbs/diag.py
@@ -21,35 +21,34 @@ def check_fips(fips_dir) :
log.colored(log.GREEN, ' uptodate')
#-------------------------------------------------------------------------------
-def check_tools(fips_dir) :
+def check_tools(fips_dir):
"""check whether required command line tools can be found"""
log.colored(log.YELLOW, '=== tools:')
tools = [ git, cmake, ccmake, cmake_gui, make, ninja, xcodebuild, xcrun, javac, java, node, python2, ccache, vscode, clion ]
platform = util.get_host_platform()
for tool in tools:
- if platform in tool.platforms :
- if tool.check_exists(fips_dir) :
+ if platform in tool.platforms:
+ if tool.check_exists(fips_dir):
log.ok(tool.name, 'found')
- else :
- if tool.optional :
- log.optional(tool.name, 'OPTIONAL, NOT FOUND ({})'.format(tool.not_found))
- else :
- log.failed(tool.name, 'NOT FOUND ({})'.format(tool.not_found))
+ elif tool.optional:
+ log.optional(tool.name, f'OPTIONAL, NOT FOUND ({tool.not_found})')
+ else:
+ log.failed(tool.name, f'NOT FOUND ({tool.not_found})')
#-------------------------------------------------------------------------------
-def check_configs(fips_dir, proj_dir) :
+def check_configs(fips_dir, proj_dir):
"""find configs and check if they are valid"""
log.colored(log.YELLOW, '=== configs:')
dirs = [ fips_dir ]
configs = config.load(fips_dir, proj_dir, '*')
- for cfg in configs :
+ for cfg in configs:
log.colored(log.BLUE, cfg['name'])
valid, errors = config.check_config_valid(fips_dir, proj_dir, cfg)
- if valid :
+ if valid:
log.colored(log.GREEN, ' ok')
- else :
- for error in errors :
- log.info(' {}'.format(error))
+ else:
+ for error in errors:
+ log.info(f' {error}')
#-------------------------------------------------------------------------------
def check_imports(fips_dir, proj_dir) :
@@ -70,17 +69,15 @@ def check_local_changes(fips_dir, proj_dir) :
log.warn('currently not in a project directory')
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""run diagnostics
:param fips_dir: absolute path to fips directory
:param proj_dir: absolute path to current project
:args: command line args
"""
- noun = 'all'
ok = False
- if len(args) > 0 :
- noun = args[0]
+ noun = args[0] if len(args) > 0 else 'all'
if noun in ['all', 'configs'] :
check_configs(fips_dir, proj_dir)
ok = True
@@ -96,8 +93,8 @@ def run(fips_dir, proj_dir, args) :
if noun in ['all', 'fips'] :
check_fips(fips_dir)
ok = True
- if not ok :
- log.error("invalid noun '{}'".format(noun))
+ if not ok:
+ log.error(f"invalid noun '{noun}'")
#-------------------------------------------------------------------------------
def help() :
diff --git a/verbs/gdb.py b/verbs/gdb.py
index 2179f3e8..f8c9666d 100644
--- a/verbs/gdb.py
+++ b/verbs/gdb.py
@@ -10,22 +10,20 @@
from mod import log, util, config, project, settings
#-------------------------------------------------------------------------------
-def gdb(fips_dir, proj_dir, cfg_name, target=None, target_args=None) :
+def gdb(fips_dir, proj_dir, cfg_name, target=None, target_args=None):
"""debug a single target with gdb"""
# prepare
proj_name = util.get_project_name_from_dir(proj_dir)
util.ensure_valid_project_dir(proj_dir)
- # load the config(s)
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
- for cfg in configs :
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
+ for cfg in configs:
# check if config is valid
config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True)
- if config_valid :
+ if config_valid:
deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
- log.colored(log.YELLOW, "=== gdb: {}".format(cfg['name']))
+ log.colored(log.YELLOW, f"=== gdb: {cfg['name']}")
cmdLine = ['gdb', "-ex", "run", "--args", target]
if target_args :
cmdLine.extend(target_args)
@@ -33,29 +31,25 @@ def gdb(fips_dir, proj_dir, cfg_name, target=None, target_args=None) :
subprocess.call(args = cmdLine, cwd = deploy_dir)
except OSError :
log.error("Failed to execute gdb (not installed?)")
- else :
- log.error("Config '{}' not valid in this environment".format(cfg['name']))
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
+ else:
+ log.error(f"Config '{cfg['name']}' not valid in this environment")
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
return True
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""debug a single target with gdb"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- tgt_name = None
- cfg_name = None
target_args = []
if '--' in args :
idx = args.index('--')
target_args = args[(idx + 1):]
args = args[:idx]
- if len(args) > 0 :
- tgt_name = args[0]
- if len(args) > 1:
- cfg_name = args[1]
+ tgt_name = args[0] if len(args) > 0 else None
+ cfg_name = args[1] if len(args) > 1 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
if not tgt_name :
diff --git a/verbs/gen.py b/verbs/gen.py
index d68a14d5..70bfcdf7 100644
--- a/verbs/gen.py
+++ b/verbs/gen.py
@@ -7,14 +7,12 @@
from mod import log, util, project, settings
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""run the gen verb"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- cfg_name = None
- if len(args) > 0 :
- cfg_name = args[0]
- if cfg_name == None :
+ cfg_name = args[0] if len(args) > 0 else None
+ if cfg_name is None:
cfg_name = settings.get(proj_dir, 'config')
project.gen(fips_dir, proj_dir, cfg_name)
diff --git a/verbs/list.py b/verbs/list.py
index ca45e498..0d91b9bc 100644
--- a/verbs/list.py
+++ b/verbs/list.py
@@ -15,82 +15,80 @@
from mod import log, util, config, project, registry, settings, dep
#-------------------------------------------------------------------------------
-def list_build_tools() :
+def list_build_tools():
"""list supported build tools"""
log.colored(log.YELLOW, '=== build-tools:')
- for tool in config.build_tools :
- log.info('{}'.format(tool))
+ for tool in config.build_tools:
+ log.info(f'{tool}')
#-------------------------------------------------------------------------------
-def list_configs(fips_dir, proj_dir) :
+def list_configs(fips_dir, proj_dir):
"""list available configs"""
log.colored(log.YELLOW, '=== configs:')
configs = config.list(fips_dir, proj_dir, '*')
- for folder in configs :
- log.colored(log.BLUE, 'from {}:'.format(folder))
- for cfg in configs[folder] :
- log.info(' {}'.format(cfg))
+ for folder in configs:
+ log.colored(log.BLUE, f'from {folder}:')
+ for cfg in configs[folder]:
+ log.info(f' {cfg}')
#-------------------------------------------------------------------------------
-def list_registry(fips_dir) :
+def list_registry(fips_dir):
"""list registry entries"""
log.colored(log.YELLOW, '=== registry:')
registry.load(fips_dir)
- for key in registry.registry :
- log.info('{}{}{} => {}'.format(log.BLUE, key, log.DEF, registry.registry[key]))
+ for key in registry.registry:
+ log.info(f'{log.BLUE}{key}{log.DEF} => {registry.registry[key]}')
#-------------------------------------------------------------------------------
-def list_settings(proj_dir) :
+def list_settings(proj_dir):
"""list settings file content"""
log.colored(log.YELLOW, '=== settings:')
- if util.is_valid_project_dir(proj_dir) :
- for key in ['config', 'target', 'jobs', 'ccache', 'iosteam'] :
+ if util.is_valid_project_dir(proj_dir):
+ for key in ['config', 'target', 'jobs', 'ccache', 'iosteam']:
value = settings.get(proj_dir, key)
if type(value) is bool :
value = 'on' if value else 'off'
default = ' (default value)' if value == settings.get_default(key) else ''
- log.info(' {}{}:{} {}{}'.format(log.BLUE, key, log.DEF, value, default))
- else :
+ log.info(f' {log.BLUE}{key}:{log.DEF} {value}{default}')
+ else:
log.info(' currently not in a valid project directory')
#-------------------------------------------------------------------------------
-def list_targets(fips_dir, proj_dir, args) :
+def list_targets(fips_dir, proj_dir, args):
log.colored(log.YELLOW, "=== targets:")
- if util.is_valid_project_dir(proj_dir) :
+ if util.is_valid_project_dir(proj_dir):
# get config name
- if len(args) == 0 :
- cfg_name = settings.get(proj_dir, 'config')
- else :
- cfg_name = args[0]
- log.info('{} config:{} {}'.format(log.BLUE, log.DEF, cfg_name))
+ cfg_name = settings.get(proj_dir, 'config') if len(args) == 0 else args[0]
+ log.info(f'{log.BLUE} config:{log.DEF} {cfg_name}')
# get the target list
success, targets = project.get_target_list(fips_dir, proj_dir, cfg_name)
- if success :
+ if success:
# split targets by type
- for type in ['lib', 'module', 'sharedlib', 'app'] :
- type_targets = [tgt for tgt in targets if targets[tgt] == type]
- if len(type_targets) > 0 :
- log.colored(log.BLUE, ' {}:'.format(type))
- for tgt in type_targets :
- log.info(' ' + tgt)
- else :
- log.info(" can't fetch project target list, please run 'fips gen' first!")
- else :
+ for type in ['lib', 'module', 'sharedlib', 'app']:
+ if type_targets := [
+ tgt for tgt in targets if targets[tgt] == type
+ ]:
+ log.colored(log.BLUE, f' {type}:')
+ for tgt in type_targets:
+ log.info(f' {tgt}')
+ else:
+ log.info(" can't fetch project target list, please run 'fips gen' first!")
+ else:
log.info(' currently not in a valid project directory')
#-------------------------------------------------------------------------------
-def list_exports(fips_dir, proj_dir) :
+def list_exports(fips_dir, proj_dir):
"""list project exports"""
log.colored(log.YELLOW, '=== exports:')
- if util.is_valid_project_dir(proj_dir) :
+ if util.is_valid_project_dir(proj_dir):
success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
if not success :
log.warn("missing import project directories, please un 'fips fetch'")
- for dep_proj_name in result :
+ for dep_proj_name in result:
cur_dep = result[dep_proj_name]
- log.colored(log.BLUE, "project '{}' exports:".format(dep_proj_name))
-
+ log.colored(log.BLUE, f"project '{dep_proj_name}' exports:")
+
cur_modules = cur_dep['exports']['modules']
cur_hdrs = cur_dep['exports']['header-dirs']
cur_libs = cur_dep['exports']['lib-dirs']
@@ -99,63 +97,61 @@ def list_exports(fips_dir, proj_dir) :
if not (cur_modules or cur_hdrs or cur_libs or cur_defs) :
log.info(" nothing")
- if cur_modules :
+ if cur_modules:
log.info(" modules:")
- for mod in cur_modules :
- log.info(" {} => {}".format(mod, cur_modules[mod]))
+ for mod in cur_modules:
+ log.info(f" {mod} => {cur_modules[mod]}")
- if cur_hdrs :
+ if cur_hdrs:
log.info(" header search dirs:")
- for hdr in cur_hdrs :
- log.info(" {}".format(hdr))
+ for hdr in cur_hdrs:
+ log.info(f" {hdr}")
- if cur_libs :
+ if cur_libs:
log.info(" lib search dirs:")
- for lib in cur_libs :
- log.info(" {}".format(lib))
+ for lib in cur_libs:
+ log.info(f" {lib}")
- if cur_defs :
+ if cur_defs:
log.info(" defines:")
- for define in cur_defs :
- log.info(" {} => {}".format(define, cur_defs[define]))
- else :
+ for define in cur_defs:
+ log.info(f" {define} => {cur_defs[define]}")
+ else:
log.info(' currently not in a valid project directory')
#-------------------------------------------------------------------------------
-def list_imports(fips_dir, proj_dir) :
+def list_imports(fips_dir, proj_dir):
"""list project imports"""
log.colored(log.YELLOW, '=== imports:')
- if util.is_valid_project_dir(proj_dir) :
+ if util.is_valid_project_dir(proj_dir):
success, result = dep.get_all_imports_exports(fips_dir, proj_dir)
if not success :
log.warn("missing import project directories, please run 'fips fetch'")
- for dep_proj_name in result :
+ for dep_proj_name in result:
# top level project is in result, but has no URL set, filter
# this from the output
- log.colored(log.BLUE, "project '{}' imports:".format(dep_proj_name))
+ log.colored(log.BLUE, f"project '{dep_proj_name}' imports:")
cur_dep = result[dep_proj_name]
- if cur_dep['imports'] :
- for imp_proj in cur_dep['imports'] :
+ if cur_dep['imports']:
+ for imp_proj in cur_dep['imports']:
git_url = cur_dep['imports'][imp_proj]['git']
git_branch = cur_dep['imports'][imp_proj]['branch']
- log.info(" '{}' from '{}' at branch '{}'".format(imp_proj, git_url, git_branch))
- else :
+ log.info(f" '{imp_proj}' from '{git_url}' at branch '{git_branch}'")
+ else:
log.info(" nothing")
- else :
+ else:
log.info(' currently not in a valid project directory')
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""list stuff
:param fips_dir: absolute path to fips
:param proj_dir: absolute path to current project
:param args: command line args
"""
- noun = 'all'
ok = False
- if len(args) > 0 :
- noun = args[0]
+ noun = args[0] if len(args) > 0 else 'all'
if noun in ['all', 'configs'] :
list_configs(fips_dir, proj_dir)
ok = True
@@ -177,8 +173,8 @@ def run(fips_dir, proj_dir, args) :
if noun in ['all', 'targets'] :
list_targets(fips_dir, proj_dir, args[1:])
ok = True
- if not ok :
- log.error("invalid noun '{}'".format(noun))
+ if not ok:
+ log.error(f"invalid noun '{noun}'")
#-------------------------------------------------------------------------------
def help() :
diff --git a/verbs/make.py b/verbs/make.py
index 3dad893c..f285c667 100644
--- a/verbs/make.py
+++ b/verbs/make.py
@@ -8,16 +8,12 @@
from mod import log, util, settings, project
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""build a single target"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- tgt_name = None
- cfg_name = None
- if len(args) > 0 :
- tgt_name = args[0]
- if len(args) > 1:
- cfg_name = args[1]
+ tgt_name = args[0] if len(args) > 0 else None
+ cfg_name = args[1] if len(args) > 1 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
if not tgt_name :
diff --git a/verbs/markdeep.py b/verbs/markdeep.py
index a6362b42..cd3fde9b 100644
--- a/verbs/markdeep.py
+++ b/verbs/markdeep.py
@@ -6,7 +6,7 @@ def run(fips_dir, proj_dir, args):
proj_name = args[1]
proj_dir = util.get_project_dir(fips_dir, proj_name)
if not util.is_valid_project_dir(proj_dir):
- log.error('{} is not a valid fips project!'.format(proj_name))
+ log.error(f'{proj_name} is not a valid fips project!')
if args[0] == 'build':
markdeep.build(fips_dir, proj_dir)
elif args[0] == 'view':
diff --git a/verbs/open.py b/verbs/open.py
index 61561d89..dd0bc3be 100644
--- a/verbs/open.py
+++ b/verbs/open.py
@@ -12,20 +12,16 @@
from mod.tools import vscode, clion
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""run the 'open' verb (opens project in IDE)"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
proj_name = util.get_project_name_from_dir(proj_dir)
- cfg_name = None
- if len(args) > 0 :
- cfg_name = args[0]
+ cfg_name = args[0] if len(args) > 0 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
- # check the cmake generator of this config
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
# hmm, only look at first match, 'open' doesn't
# make sense with config-patterns
cfg = configs[0]
@@ -44,26 +40,25 @@ def run(fips_dir, proj_dir, args) :
if cfg['build_tool'] == 'clion':
clion.run(proj_dir)
return
- # try to open as Xcode project
- proj = glob.glob(build_dir + '/*.xcodeproj')
- if proj :
- subprocess.call('open "{}"'.format(proj[0]), shell=True)
+ if proj := glob.glob(f'{build_dir}/*.xcodeproj'):
+ subprocess.call(f'open "{proj[0]}"', shell=True)
return
- # try to open as VS project
- proj = glob.glob(build_dir + '/*.sln')
- if proj :
- subprocess.call('cmd /c start {}'.format(proj[0]), shell=True)
+ if proj := glob.glob(f'{build_dir}/*.sln'):
+ subprocess.call(f'cmd /c start {proj[0]}', shell=True)
return
- # try to open as eclipse project
- proj = glob.glob(build_dir + '/.cproject')
- if proj :
- subprocess.call('eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{}"'.format(build_dir), shell=True)
+ if proj := glob.glob(f'{build_dir}/.cproject'):
+ subprocess.call(
+ f'eclipse -nosplash --launcher.timeout 60 -application org.eclipse.cdt.managedbuilder.core.headlessbuild -import "{build_dir}"',
+ shell=True,
+ )
subprocess.call('eclipse', shell=True)
return
- log.error("don't know how to open a '{}' project in {}".format(cfg['generator'], build_dir))
- else :
- log.error("config '{}' not found".format(cfg_name))
+ log.error(
+ f"don't know how to open a '{cfg['generator']}' project in {build_dir}"
+ )
+ else:
+ log.error(f"config '{cfg_name}' not found")
#-------------------------------------------------------------------------------
def help() :
diff --git a/verbs/setup.py b/verbs/setup.py
index 30e99bc1..7fb6dd10 100644
--- a/verbs/setup.py
+++ b/verbs/setup.py
@@ -7,16 +7,14 @@
from mod import log, emscripten, android
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""run the 'setup' verb"""
- sdk_name = None
- if len(args) > 0 :
- sdk_name = args[0]
- if sdk_name == 'emscripten' :
- emscripten.setup(fips_dir, proj_dir)
- elif sdk_name == 'android' :
+ sdk_name = args[0] if len(args) > 0 else None
+ if sdk_name == 'android':
android.setup(fips_dir, proj_dir)
- else :
+ elif sdk_name == 'emscripten':
+ emscripten.setup(fips_dir, proj_dir)
+ else:
log.error("invalid SDK name (must be 'emscripten' or 'android')")
#-------------------------------------------------------------------------------
diff --git a/verbs/unset.py b/verbs/unset.py
index af2f53fd..694d3351 100644
--- a/verbs/unset.py
+++ b/verbs/unset.py
@@ -9,24 +9,23 @@
valid_nouns = ['config', 'target', 'jobs', 'ccache']
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""run the 'unset' verb"""
- if len(args) > 0 :
+ if len(args) > 0:
noun = args[0]
- if noun in valid_nouns :
+ if noun in valid_nouns:
settings.unset(proj_dir, noun)
- else :
- log.error("invalid noun '{}', must be: {}".format(
- noun, ', '.join(valid_nouns)))
- else :
- log.error("expected noun: {}".format(', '.join(valid_nouns)))
+ else:
+ log.error(f"invalid noun '{noun}', must be: {', '.join(valid_nouns)}")
+ else:
+ log.error(f"expected noun: {', '.join(valid_nouns)}")
#-------------------------------------------------------------------------------
-def help() :
+def help():
"""print 'unset' help"""
- log.info(log.YELLOW +
- "fips unset [{}]\n" .format('|'.join(valid_nouns)) + log.DEF +
- " unset currently active config or make-target")
+ log.info(
+ f"{log.YELLOW}fips unset [{'|'.join(valid_nouns)}]\n{log.DEF} unset currently active config or make-target"
+ )
diff --git a/verbs/valgrind.py b/verbs/valgrind.py
index fb098095..6305b030 100644
--- a/verbs/valgrind.py
+++ b/verbs/valgrind.py
@@ -10,60 +10,54 @@
from mod import log, util, config, project, settings
#-------------------------------------------------------------------------------
-def valgrind(fips_dir, proj_dir, cfg_name, target, target_args) :
+def valgrind(fips_dir, proj_dir, cfg_name, target, target_args):
"""debug a single target with valgrind"""
# prepare
proj_name = util.get_project_name_from_dir(proj_dir)
util.ensure_valid_project_dir(proj_dir)
- # load the config(s)
- configs = config.load(fips_dir, proj_dir, cfg_name)
- if configs :
- for cfg in configs :
+ if configs := config.load(fips_dir, proj_dir, cfg_name):
+ for cfg in configs:
# check if config is valid
config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True)
- if config_valid :
+ if config_valid:
deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg['name'])
valgrind_bin = settings.get(proj_dir, 'valgrind')
if not valgrind_bin :
valgrind_bin = 'valgrind'
- log.colored(log.YELLOW, "=== valgrind: {} ({})".format(cfg['name'], valgrind_bin))
+ log.colored(log.YELLOW, f"=== valgrind: {cfg['name']} ({valgrind_bin})")
cmd_line = valgrind_bin
- if target_args :
+ if target_args:
cmd_line += ' ' + ' '.join(target_args)
- else :
+ else:
cmd_line += ' ' + '--leak-check=no'
cmd_line += ' ' + '--show-reachable=yes'
cmd_line += ' ' + '--track-fds=yes'
cmd_line += ' ' + '--run-libc-freeres=no'
- cmd_line += ' ' + "--log-file={}/valgrind-{}.log".format(proj_dir, target)
- cmd_line += ' ' + "./{}".format(target)
+ cmd_line += f" --log-file={proj_dir}/valgrind-{target}.log"
+ cmd_line += f" ./{target}"
#log.colored(log.GREEN, "cmdline: {}".format(cmd_line))
subprocess.call(args = cmd_line, cwd = deploy_dir, shell = True)
- else :
- log.error("Config '{}' not valid in this environment".format(cfg['name']))
- else :
- log.error("No valid configs found for '{}'".format(cfg_name))
+ else:
+ log.error(f"Config '{cfg['name']}' not valid in this environment")
+ else:
+ log.error(f"No valid configs found for '{cfg_name}'")
return True
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
"""debug a single target with valgrind"""
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
- tgt_name = None
- cfg_name = None
tgt_args = []
if '--' in args :
idx = args.index('--')
tgt_args = args[(idx + 1):]
args = args[:idx]
- if len(args) > 0 :
- tgt_name = args[0]
- if len(args) > 1 :
- cfg_name = args[1]
+ tgt_name = args[0] if len(args) > 0 else None
+ cfg_name = args[1] if len(args) > 1 else None
if not cfg_name :
cfg_name = settings.get(proj_dir, 'config')
if not tgt_name :
diff --git a/verbs/vscode.py b/verbs/vscode.py
index 34e686e1..759edd8e 100644
--- a/verbs/vscode.py
+++ b/verbs/vscode.py
@@ -6,14 +6,14 @@
from mod.tools import vscode
#-------------------------------------------------------------------------------
-def run(fips_dir, proj_dir, args) :
+def run(fips_dir, proj_dir, args):
if not util.is_valid_project_dir(proj_dir) :
log.error('must be run in a project directory')
if len(args) > 0:
if args[0] == 'clean':
vscode.cleanup(fips_dir, proj_dir)
else:
- log.error("invalid noun '{}' (expected: clean)".format(noun))
+ log.error(f"invalid noun '{noun}' (expected: clean)")
#-------------------------------------------------------------------------------
def help():
diff --git a/yaml/yaml2/__init__.py b/yaml/yaml2/__init__.py
index 76e19e13..62fd91c6 100644
--- a/yaml/yaml2/__init__.py
+++ b/yaml/yaml2/__init__.py
@@ -277,11 +277,11 @@ class YAMLObjectMetaclass(type):
"""
The metaclass for YAMLObject.
"""
- def __init__(cls, name, bases, kwds):
- super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ def __init__(self, name, bases, kwds):
+ super(YAMLObjectMetaclass, self).__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
- cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
- cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+ self.yaml_loader.add_constructor(self.yaml_tag, self.from_yaml)
+ self.yaml_dumper.add_representer(self, self.to_yaml)
class YAMLObject(object):
"""
@@ -298,18 +298,19 @@ class YAMLObject(object):
yaml_tag = None
yaml_flow_style = None
- def from_yaml(cls, loader, node):
+ def from_yaml(self, loader, node):
"""
Convert a representation node to a Python object.
"""
- return loader.construct_yaml_object(node, cls)
+ return loader.construct_yaml_object(node, self)
from_yaml = classmethod(from_yaml)
- def to_yaml(cls, dumper, data):
+ def to_yaml(self, dumper, data):
"""
Convert a Python object to a representation node.
"""
- return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
- flow_style=cls.yaml_flow_style)
+ return dumper.represent_yaml_object(
+ self.yaml_tag, data, self, flow_style=self.yaml_flow_style
+ )
to_yaml = classmethod(to_yaml)
diff --git a/yaml/yaml2/constructor.py b/yaml/yaml2/constructor.py
index 635faac3..200b8200 100644
--- a/yaml/yaml2/constructor.py
+++ b/yaml/yaml2/constructor.py
@@ -35,18 +35,13 @@ def get_data(self):
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
- if node is not None:
- return self.construct_document(node)
- return None
+ return self.construct_document(node) if node is not None else None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
- for generator in state_generators:
- for dummy in generator:
- pass
+ for generator in state_generators:
+ # run each deferred generator to completion
+ for _ in generator:
+ pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
@@ -91,10 +86,7 @@ def construct_object(self, node, deep=False):
if isinstance(data, types.GeneratorType):
generator = data
data = generator.next()
- if self.deep_construct:
- for dummy in generator:
- pass
- else:
+ if self.deep_construct:
+ # deep construction: exhaust the generator immediately
+ for _ in generator:
+ pass
+ else:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
@@ -104,16 +96,22 @@ def construct_object(self, node, deep=False):
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
- raise ConstructorError(None, None,
- "expected a scalar node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a scalar node, but found {node.id}",
+ node.start_mark,
+ )
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
- raise ConstructorError(None, None,
- "expected a sequence node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a sequence node, but found {node.id}",
+ node.start_mark,
+ )
return [self.construct_object(child, deep=deep)
for child in node.value]
@@ -136,9 +134,12 @@ def construct_mapping(self, node, deep=False):
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
- raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a mapping node, but found {node.id}",
+ node.start_mark,
+ )
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
@@ -146,16 +147,16 @@ def construct_pairs(self, node, deep=False):
pairs.append((key, value))
return pairs
- def add_constructor(cls, tag, constructor):
- if not 'yaml_constructors' in cls.__dict__:
- cls.yaml_constructors = cls.yaml_constructors.copy()
- cls.yaml_constructors[tag] = constructor
+ def add_constructor(self, tag, constructor):
+ if 'yaml_constructors' not in self.__dict__:
+ self.yaml_constructors = self.yaml_constructors.copy()
+ self.yaml_constructors[tag] = constructor
add_constructor = classmethod(add_constructor)
- def add_multi_constructor(cls, tag_prefix, multi_constructor):
- if not 'yaml_multi_constructors' in cls.__dict__:
- cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
- cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+ def add_multi_constructor(self, tag_prefix, multi_constructor):
+ if 'yaml_multi_constructors' not in self.__dict__:
+ self.yaml_multi_constructors = self.yaml_multi_constructors.copy()
+ self.yaml_multi_constructors[tag_prefix] = multi_constructor
add_multi_constructor = classmethod(add_multi_constructor)
class SafeConstructor(BaseConstructor):
@@ -181,19 +182,24 @@ def flatten_mapping(self, node):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing a mapping",
- node.start_mark,
- "expected a mapping for merging, but found %s"
- % subnode.id, subnode.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping",
+ node.start_mark,
+ f"expected a mapping for merging, but found {subnode.id}",
+ subnode.start_mark,
+ )
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "expected a mapping or list of mappings for merging, but found %s"
- % value_node.id, value_node.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping",
+ node.start_mark,
+ f"expected a mapping or list of mappings for merging, but found {value_node.id}",
+ value_node.start_mark,
+ )
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
@@ -227,9 +233,7 @@ def construct_yaml_bool(self, node):
def construct_yaml_int(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '')
- sign = +1
- if value[0] == '-':
- sign = -1
+ sign = -1 if value[0] == '-' else +1
if value[0] in '+-':
value = value[1:]
if value == '0':
@@ -260,9 +264,7 @@ def construct_yaml_int(self, node):
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '').lower()
- sign = +1
- if value[0] == '-':
- sign = -1
+ sign = -1 if value[0] == '-' else +1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
@@ -337,13 +339,20 @@ def construct_yaml_omap(self, node):
omap = []
yield omap
if not isinstance(node, SequenceNode):
- raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a sequence, but found %s" % node.id, node.start_mark)
+ raise ConstructorError(
+ "while constructing an ordered map",
+ node.start_mark,
+ f"expected a sequence, but found {node.id}",
+ node.start_mark,
+ )
for subnode in node.value:
if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a mapping of length 1, but found %s" % subnode.id,
- subnode.start_mark)
+ raise ConstructorError(
+ "while constructing an ordered map",
+ node.start_mark,
+ f"expected a mapping of length 1, but found {subnode.id}",
+ subnode.start_mark,
+ )
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
@@ -358,13 +367,20 @@ def construct_yaml_pairs(self, node):
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
- raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a sequence, but found %s" % node.id, node.start_mark)
+ raise ConstructorError(
+ "while constructing pairs",
+ node.start_mark,
+ f"expected a sequence, but found {node.id}",
+ node.start_mark,
+ )
for subnode in node.value:
if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a mapping of length 1, but found %s" % subnode.id,
- subnode.start_mark)
+ raise ConstructorError(
+ "while constructing pairs",
+ node.start_mark,
+ f"expected a mapping of length 1, but found {subnode.id}",
+ subnode.start_mark,
+ )
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
@@ -514,16 +530,14 @@ def find_python_name(self, name, mark):
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
- value = self.construct_scalar(node)
- if value:
+ if value := self.construct_scalar(node):
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
- value = self.construct_scalar(node)
- if value:
+ if value := self.construct_scalar(node):
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
diff --git a/yaml/yaml2/emitter.py b/yaml/yaml2/emitter.py
index e5bcdccc..1b906e98 100644
--- a/yaml/yaml2/emitter.py
+++ b/yaml/yaml2/emitter.py
@@ -146,10 +146,7 @@ def need_events(self, count):
def increase_indent(self, flow=False, indentless=False):
self.indents.append(self.indent)
if self.indent is None:
- if flow:
- self.indent = self.best_indent
- else:
- self.indent = 0
+ self.indent = self.best_indent if flow else 0
elif not indentless:
self.indent += self.best_indent
@@ -158,17 +155,15 @@ def increase_indent(self, flow=False, indentless=False):
# Stream handlers.
def expect_stream_start(self):
- if isinstance(self.event, StreamStartEvent):
- if self.event.encoding and not getattr(self.stream, 'encoding', None):
- self.encoding = self.event.encoding
- self.write_stream_start()
- self.state = self.expect_first_document_start
- else:
- raise EmitterError("expected StreamStartEvent, but got %s"
- % self.event)
+ if not isinstance(self.event, StreamStartEvent):
+ raise EmitterError(f"expected StreamStartEvent, but got {self.event}")
+ if self.event.encoding and not getattr(self.stream, 'encoding', None):
+ self.encoding = self.event.encoding
+ self.write_stream_start()
+ self.state = self.expect_first_document_start
def expect_nothing(self):
- raise EmitterError("expected nothing, but got %s" % self.event)
+ raise EmitterError(f"expected nothing, but got {self.event}")
# Document handlers.
@@ -209,20 +204,17 @@ def expect_document_start(self, first=False):
self.write_stream_end()
self.state = self.expect_nothing
else:
- raise EmitterError("expected DocumentStartEvent, but got %s"
- % self.event)
+ raise EmitterError(f"expected DocumentStartEvent, but got {self.event}")
def expect_document_end(self):
- if isinstance(self.event, DocumentEndEvent):
+ if not isinstance(self.event, DocumentEndEvent):
+ raise EmitterError(f"expected DocumentEndEvent, but got {self.event}")
+ self.write_indent()
+ if self.event.explicit:
+ self.write_indicator(u'...', True)
self.write_indent()
- if self.event.explicit:
- self.write_indicator(u'...', True)
- self.write_indent()
- self.flush_stream()
- self.state = self.expect_document_start
- else:
- raise EmitterError("expected DocumentEndEvent, but got %s"
- % self.event)
+ self.flush_stream()
+ self.state = self.expect_document_start
def expect_document_root(self):
self.states.append(self.expect_document_end)
@@ -245,18 +237,18 @@ def expect_node(self, root=False, sequence=False, mapping=False,
self.expect_scalar()
elif isinstance(self.event, SequenceStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
- or self.check_empty_sequence():
+ or self.check_empty_sequence():
self.expect_flow_sequence()
else:
self.expect_block_sequence()
elif isinstance(self.event, MappingStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
- or self.check_empty_mapping():
+ or self.check_empty_mapping():
self.expect_flow_mapping()
else:
self.expect_block_mapping()
else:
- raise EmitterError("expected NodeEvent, but got %s" % self.event)
+ raise EmitterError(f"expected NodeEvent, but got {self.event}")
def expect_alias(self):
if self.event.anchor is None:
@@ -480,10 +472,9 @@ def process_tag(self):
if self.event.implicit[0] and tag is None:
tag = u'!'
self.prepared_tag = None
- else:
- if (not self.canonical or tag is None) and self.event.implicit:
- self.prepared_tag = None
- return
+ elif (not self.canonical or tag is None) and self.event.implicit:
+ self.prepared_tag = None
+ return
if tag is None:
raise EmitterError("tag is not specified")
if self.prepared_tag is None:
@@ -566,15 +557,14 @@ def prepare_tag_prefix(self, prefix):
while end < len(prefix):
ch = prefix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
- or ch in u'-;/?!:@&=+$,_.~*\'()[]':
+ or ch in u'-;/?!:@&=+$,_.~*\'()[]':
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end+1
data = ch.encode('utf-8')
- for ch in data:
- chunks.append(u'%%%02X' % ord(ch))
+ chunks.extend(u'%%%02X' % ord(ch) for ch in data)
if start < end:
chunks.append(prefix[start:end])
return u''.join(chunks)
@@ -590,7 +580,7 @@ def prepare_tag(self, tag):
prefixes.sort()
for prefix in prefixes:
if tag.startswith(prefix) \
- and (prefix == u'!' or len(prefix) < len(tag)):
+ and (prefix == u'!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix):]
chunks = []
@@ -598,23 +588,19 @@ def prepare_tag(self, tag):
while end < len(suffix):
ch = suffix[end]
if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
- or ch in u'-;/?:@&=+$,_.~*\'()[]' \
- or (ch == u'!' and handle != u'!'):
+ or ch in u'-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == u'!' and handle != u'!'):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end+1
data = ch.encode('utf-8')
- for ch in data:
- chunks.append(u'%%%02X' % ord(ch))
+ chunks.extend(u'%%%02X' % ord(ch) for ch in data)
if start < end:
chunks.append(suffix[start:end])
suffix_text = u''.join(chunks)
- if handle:
- return u'%s%s' % (handle, suffix_text)
- else:
- return u'!<%s>' % suffix_text
+ return f'{handle}{suffix_text}' if handle else f'!<{suffix_text}>'
def prepare_anchor(self, anchor):
if not anchor:
@@ -801,10 +787,7 @@ def write_stream_end(self):
def write_indicator(self, indicator, need_whitespace,
whitespace=False, indention=False):
- if self.whitespace or not need_whitespace:
- data = indicator
- else:
- data = u' '+indicator
+ data = indicator if self.whitespace or not need_whitespace else f' {indicator}'
self.whitespace = whitespace
self.indention = self.indention and indention
self.column += len(data)
diff --git a/yaml/yaml2/events.py b/yaml/yaml2/events.py
index f79ad389..3eb33010 100644
--- a/yaml/yaml2/events.py
+++ b/yaml/yaml2/events.py
@@ -10,7 +10,7 @@ def __repr__(self):
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
- return '%s(%s)' % (self.__class__.__name__, arguments)
+ return f'{self.__class__.__name__}({arguments})'
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
diff --git a/yaml/yaml2/parser.py b/yaml/yaml2/parser.py
index f9e3057f..86b46d51 100644
--- a/yaml/yaml2/parser.py
+++ b/yaml/yaml2/parser.py
@@ -138,23 +138,20 @@ def parse_stream_start(self):
def parse_implicit_document_start(self):
- # Parse an implicit document.
- if not self.check_token(DirectiveToken, DocumentStartToken,
+ if self.check_token(DirectiveToken, DocumentStartToken,
StreamEndToken):
- self.tag_handles = self.DEFAULT_TAGS
- token = self.peek_token()
- start_mark = end_mark = token.start_mark
- event = DocumentStartEvent(start_mark, end_mark,
- explicit=False)
-
- # Prepare the next state.
- self.states.append(self.parse_document_end)
- self.state = self.parse_block_node
+ return self.parse_document_start()
+ self.tag_handles = self.DEFAULT_TAGS
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=False)
- return event
+ # Prepare the next state.
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_block_node
- else:
- return self.parse_document_start()
+ return event
def parse_document_start(self):
@@ -206,13 +203,13 @@ def parse_document_end(self):
return event
def parse_document_content(self):
- if self.check_token(DirectiveToken,
- DocumentStartToken, DocumentEndToken, StreamEndToken):
- event = self.process_empty_scalar(self.peek_token().start_mark)
- self.state = self.states.pop()
- return event
- else:
+ if not self.check_token(
+ DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
+ ):
return self.parse_block_node()
+ event = self.process_empty_scalar(self.peek_token().start_mark)
+ self.state = self.states.pop()
+ return event
def process_directives(self):
self.yaml_version = None
@@ -300,14 +297,14 @@ def parse_node(self, block=False, indentless_sequence=False):
anchor = token.value
if tag is not None:
handle, suffix = tag
- if handle is not None:
- if handle not in self.tag_handles:
- raise ParserError("while parsing a node", start_mark,
- "found undefined tag handle %r" % handle.encode('utf-8'),
- tag_mark)
- tag = self.tag_handles[handle]+suffix
- else:
+ if handle is None:
tag = suffix
+ elif handle not in self.tag_handles:
+ raise ParserError("while parsing a node", start_mark,
+ "found undefined tag handle %r" % handle.encode('utf-8'),
+ tag_mark)
+ else:
+ tag = self.tag_handles[handle]+suffix
#if tag == u'!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
@@ -321,54 +318,53 @@ def parse_node(self, block=False, indentless_sequence=False):
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark)
self.state = self.parse_indentless_sequence_entry
- else:
- if self.check_token(ScalarToken):
- token = self.get_token()
- end_mark = token.end_mark
- if (token.plain and tag is None) or tag == u'!':
- implicit = (True, False)
- elif tag is None:
- implicit = (False, True)
- else:
- implicit = (False, False)
- event = ScalarEvent(anchor, tag, implicit, token.value,
- start_mark, end_mark, style=token.style)
- self.state = self.states.pop()
- elif self.check_token(FlowSequenceStartToken):
- end_mark = self.peek_token().end_mark
- event = SequenceStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=True)
- self.state = self.parse_flow_sequence_first_entry
- elif self.check_token(FlowMappingStartToken):
- end_mark = self.peek_token().end_mark
- event = MappingStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=True)
- self.state = self.parse_flow_mapping_first_key
- elif block and self.check_token(BlockSequenceStartToken):
- end_mark = self.peek_token().start_mark
- event = SequenceStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=False)
- self.state = self.parse_block_sequence_first_entry
- elif block and self.check_token(BlockMappingStartToken):
- end_mark = self.peek_token().start_mark
- event = MappingStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=False)
- self.state = self.parse_block_mapping_first_key
- elif anchor is not None or tag is not None:
- # Empty scalars are allowed even if a tag or an anchor is
- # specified.
- event = ScalarEvent(anchor, tag, (implicit, False), u'',
- start_mark, end_mark)
- self.state = self.states.pop()
+ elif self.check_token(ScalarToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ if (token.plain and tag is None) or tag == u'!':
+ implicit = (True, False)
+ elif tag is None:
+ implicit = (False, True)
else:
- if block:
- node = 'block'
- else:
- node = 'flow'
- token = self.peek_token()
- raise ParserError("while parsing a %s node" % node, start_mark,
- "expected the node content, but found %r" % token.id,
- token.start_mark)
+ implicit = (False, False)
+ event = ScalarEvent(anchor, tag, implicit, token.value,
+ start_mark, end_mark, style=token.style)
+ self.state = self.states.pop()
+ elif self.check_token(FlowSequenceStartToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_sequence_first_entry
+ elif self.check_token(FlowMappingStartToken):
+ end_mark = self.peek_token().end_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_mapping_first_key
+ elif block and self.check_token(BlockSequenceStartToken):
+ end_mark = self.peek_token().start_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_sequence_first_entry
+ elif block and self.check_token(BlockMappingStartToken):
+ end_mark = self.peek_token().start_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_mapping_first_key
+ elif anchor is not None or tag is not None:
+ # Empty scalars are allowed even if a tag or an anchor is
+ # specified.
+ event = ScalarEvent(anchor, tag, (implicit, False), u'',
+ start_mark, end_mark)
+ self.state = self.states.pop()
+ else:
+ node = 'block' if block else 'flow'
+ token = self.peek_token()
+ raise ParserError(
+ f"while parsing a {node} node",
+ start_mark,
+ "expected the node content, but found %r" % token.id,
+ token.start_mark,
+ )
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
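The bulk of the parse_node hunk above is mechanical: the original nested the token dispatch inside an `else:` block holding a second if/elif ladder, and the rewrite promotes everything to one flat elif chain that ends in the raise. A minimal sketch of that shape outside PyYAML (classify and its token strings are illustrative only):

def classify(token, block=False):
    # one flat chain instead of "else:" wrapping a second if/elif ladder
    if token.isdigit():
        kind = 'scalar'
    elif token == '[':
        kind = 'flow sequence'
    elif token == '{':
        kind = 'flow mapping'
    elif block and token == '-':
        kind = 'block sequence'
    else:
        node = 'block' if block else 'flow'
        raise ValueError(f"while parsing a {node} node: "
                         f"expected the node content, but found {token!r}")
    return kind

print(classify('42'), classify('[', block=True), classify('-', block=True))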
diff --git a/yaml/yaml2/reader.py b/yaml/yaml2/reader.py
index 3249e6b9..9d0b62a0 100644
--- a/yaml/yaml2/reader.py
+++ b/yaml/yaml2/reader.py
@@ -136,8 +136,7 @@ def determine_encoding(self):
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
- match = self.NON_PRINTABLE.search(data)
- if match:
+ if match := self.NON_PRINTABLE.search(data):
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
@@ -175,8 +174,7 @@ def update(self, length):
break
def update_raw(self, size=1024):
- data = self.stream.read(size)
- if data:
+ if data := self.stream.read(size):
self.raw_buffer += data
self.stream_pointer += len(data)
else:
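Both reader changes above are the assignment-expression form: bind the value and test it in the same line. A self-contained sketch of the pattern, assuming Python 3.8 or newer (older interpreters reject the `:=` syntax); find_unprintable is an illustrative helper, not PyYAML API:

import re

NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E]')

def find_unprintable(text):
    # ':=' replaces the two-step "match = ...; if match:" seen in check_printable()
    if match := NON_PRINTABLE.search(text):
        return match.start(), match.group()
    return None

print(find_unprintable('plain text'))     # None
print(find_unprintable('bell\x07here'))   # (4, '\x07')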
diff --git a/yaml/yaml2/representer.py b/yaml/yaml2/representer.py
index 5f4fc70d..24ef8ffb 100644
--- a/yaml/yaml2/representer.py
+++ b/yaml/yaml2/representer.py
@@ -38,16 +38,10 @@ def get_classobj_bases(self, cls):
return bases
def represent_data(self, data):
- if self.ignore_aliases(data):
- self.alias_key = None
- else:
- self.alias_key = id(data)
+ self.alias_key = None if self.ignore_aliases(data) else id(data)
if self.alias_key is not None:
if self.alias_key in self.represented_objects:
- node = self.represented_objects[self.alias_key]
- #if node is None:
- # raise RepresenterError("recursive objects are not allowed: %r" % data)
- return node
+ return self.represented_objects[self.alias_key]
#self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
@@ -71,16 +65,16 @@ def represent_data(self, data):
# self.represented_objects[alias_key] = node
return node
- def add_representer(cls, data_type, representer):
- if not 'yaml_representers' in cls.__dict__:
- cls.yaml_representers = cls.yaml_representers.copy()
- cls.yaml_representers[data_type] = representer
+ def add_representer(self, data_type, representer):
+ if 'yaml_representers' not in self.__dict__:
+ self.yaml_representers = self.yaml_representers.copy()
+ self.yaml_representers[data_type] = representer
add_representer = classmethod(add_representer)
- def add_multi_representer(cls, data_type, representer):
- if not 'yaml_multi_representers' in cls.__dict__:
- cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
- cls.yaml_multi_representers[data_type] = representer
+ def add_multi_representer(self, data_type, representer):
+ if 'yaml_multi_representers' not in self.__dict__:
+ self.yaml_multi_representers = self.yaml_multi_representers.copy()
+ self.yaml_multi_representers[data_type] = representer
add_multi_representer = classmethod(add_multi_representer)
def represent_scalar(self, tag, value, style=None):
@@ -99,7 +93,7 @@ def represent_sequence(self, tag, sequence, flow_style=None):
best_style = True
for item in sequence:
node_item = self.represent_data(item)
- if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ if not isinstance(node_item, ScalarNode) or node_item.style:
best_style = False
value.append(node_item)
if flow_style is None:
@@ -121,9 +115,9 @@ def represent_mapping(self, tag, mapping, flow_style=None):
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
- if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ if not isinstance(node_key, ScalarNode) or node_key.style:
best_style = False
- if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ if not isinstance(node_value, ScalarNode) or node_value.style:
best_style = False
value.append((node_key, node_value))
if flow_style is None:
@@ -168,10 +162,7 @@ def represent_unicode(self, data):
return self.represent_scalar(u'tag:yaml.org,2002:str', data)
def represent_bool(self, data):
- if data:
- value = u'true'
- else:
- value = u'false'
+ value = u'true' if data else u'false'
return self.represent_scalar(u'tag:yaml.org,2002:bool', value)
def represent_int(self, data):
@@ -185,7 +176,7 @@ def represent_long(self, data):
inf_value *= inf_value
def represent_float(self, data):
- if data != data or (data == 0.0 and data == 1.0):
+ if data != data:
value = u'.nan'
elif data == self.inf_value:
value = u'.inf'
@@ -223,9 +214,7 @@ def represent_dict(self, data):
return self.represent_mapping(u'tag:yaml.org,2002:map', data)
def represent_set(self, data):
- value = {}
- for key in data:
- value[key] = None
+ value = {key: None for key in data}
return self.represent_mapping(u'tag:yaml.org,2002:set', value)
def represent_date(self, data):
@@ -244,7 +233,7 @@ def represent_yaml_object(self, tag, data, cls, flow_style=None):
return self.represent_mapping(tag, state, flow_style=flow_style)
def represent_undefined(self, data):
- raise RepresenterError("cannot represent an object: %s" % data)
+ raise RepresenterError(f"cannot represent an object: {data}")
SafeRepresenter.add_representer(type(None),
SafeRepresenter.represent_none)
@@ -336,12 +325,13 @@ def represent_tuple(self, data):
return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)
def represent_name(self, data):
- name = u'%s.%s' % (data.__module__, data.__name__)
- return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')
+ name = f'{data.__module__}.{data.__name__}'
+ return self.represent_scalar(f'tag:yaml.org,2002:python/name:{name}', u'')
def represent_module(self, data):
return self.represent_scalar(
- u'tag:yaml.org,2002:python/module:'+data.__name__, u'')
+ f'tag:yaml.org,2002:python/module:{data.__name__}', u''
+ )
def represent_instance(self, data):
# For instances of classic classes, we use __getinitargs__ and
@@ -362,27 +352,27 @@ def represent_instance(self, data):
# !!python/object/new node.
cls = data.__class__
- class_name = u'%s.%s' % (cls.__module__, cls.__name__)
+ class_name = f'{cls.__module__}.{cls.__name__}'
args = None
state = None
if hasattr(data, '__getinitargs__'):
args = list(data.__getinitargs__())
- if hasattr(data, '__getstate__'):
- state = data.__getstate__()
- else:
- state = data.__dict__
+ state = data.__getstate__() if hasattr(data, '__getstate__') else data.__dict__
if args is None and isinstance(state, dict):
return self.represent_mapping(
- u'tag:yaml.org,2002:python/object:'+class_name, state)
+ f'tag:yaml.org,2002:python/object:{class_name}', state
+ )
if isinstance(state, dict) and not state:
return self.represent_sequence(
- u'tag:yaml.org,2002:python/object/new:'+class_name, args)
+ f'tag:yaml.org,2002:python/object/new:{class_name}', args
+ )
value = {}
if args:
value['args'] = args
value['state'] = state
return self.represent_mapping(
- u'tag:yaml.org,2002:python/object/new:'+class_name, value)
+ f'tag:yaml.org,2002:python/object/new:{class_name}', value
+ )
def represent_object(self, data):
# We use __reduce__ API to save the data. data.__reduce__ returns
@@ -427,13 +417,14 @@ def represent_object(self, data):
else:
tag = u'tag:yaml.org,2002:python/object/apply:'
newobj = False
- function_name = u'%s.%s' % (function.__module__, function.__name__)
+ function_name = f'{function.__module__}.{function.__name__}'
if not args and not listitems and not dictitems \
- and isinstance(state, dict) and newobj:
+ and isinstance(state, dict) and newobj:
return self.represent_mapping(
- u'tag:yaml.org,2002:python/object:'+function_name, state)
+ f'tag:yaml.org,2002:python/object:{function_name}', state
+ )
if not listitems and not dictitems \
- and isinstance(state, dict) and not state:
+ and isinstance(state, dict) and not state:
return self.represent_sequence(tag+function_name, args)
value = {}
if args:
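The represent_sequence/represent_mapping edits above rely on De Morgan's law: `not (A and not B)` is the same condition as `not A or B`, where A is "the node is a ScalarNode" and B is "it carries an explicit style". A quick standalone check (not PyYAML code) that the two forms agree on every case:

from itertools import product

for is_scalar, has_style in product((False, True), repeat=2):
    original = not (is_scalar and not has_style)
    rewritten = not is_scalar or has_style
    assert original == rewritten, (is_scalar, has_style)
print('both forms of the best_style test agree')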
diff --git a/yaml/yaml2/resolver.py b/yaml/yaml2/resolver.py
index 6b5ab875..ea1c4425 100644
--- a/yaml/yaml2/resolver.py
+++ b/yaml/yaml2/resolver.py
@@ -22,16 +22,16 @@ def __init__(self):
self.resolver_exact_paths = []
self.resolver_prefix_paths = []
- def add_implicit_resolver(cls, tag, regexp, first):
- if not 'yaml_implicit_resolvers' in cls.__dict__:
- cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
+ def add_implicit_resolver(self, tag, regexp, first):
+ if 'yaml_implicit_resolvers' not in self.__dict__:
+ self.yaml_implicit_resolvers = self.yaml_implicit_resolvers.copy()
if first is None:
first = [None]
for ch in first:
- cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
+ self.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
add_implicit_resolver = classmethod(add_implicit_resolver)
- def add_path_resolver(cls, tag, path, kind=None):
+ def add_path_resolver(self, tag, path, kind=None):
# Note: `add_path_resolver` is experimental. The API could be changed.
# `new_path` is a pattern that is matched against the path from the
# root to the node that is being considered. `node_path` elements are
@@ -44,8 +44,8 @@ def add_path_resolver(cls, tag, path, kind=None):
# a mapping value that corresponds to a scalar key which content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
- if not 'yaml_path_resolvers' in cls.__dict__:
- cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
+ if 'yaml_path_resolvers' not in self.__dict__:
+ self.yaml_path_resolvers = self.yaml_path_resolvers.copy()
new_path = []
for element in path:
if isinstance(element, (list, tuple)):
@@ -55,7 +55,7 @@ def add_path_resolver(cls, tag, path, kind=None):
node_check = element[0]
index_check = True
else:
- raise ResolverError("Invalid path element: %s" % element)
+ raise ResolverError(f"Invalid path element: {element}")
else:
node_check = None
index_check = element
@@ -66,12 +66,12 @@ def add_path_resolver(cls, tag, path, kind=None):
elif node_check is dict:
node_check = MappingNode
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
- and not isinstance(node_check, basestring) \
- and node_check is not None:
- raise ResolverError("Invalid node checker: %s" % node_check)
+ and not isinstance(node_check, basestring) \
+ and node_check is not None:
+ raise ResolverError(f"Invalid node checker: {node_check}")
if not isinstance(index_check, (basestring, int)) \
- and index_check is not None:
- raise ResolverError("Invalid index checker: %s" % index_check)
+ and index_check is not None:
+ raise ResolverError(f"Invalid index checker: {index_check}")
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
@@ -80,9 +80,9 @@ def add_path_resolver(cls, tag, path, kind=None):
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
- and kind is not None:
- raise ResolverError("Invalid node kind: %s" % kind)
- cls.yaml_path_resolvers[tuple(new_path), kind] = tag
+ and kind is not None:
+ raise ResolverError(f"Invalid node kind: {kind}")
+ self.yaml_path_resolvers[tuple(new_path), kind] = tag
add_path_resolver = classmethod(add_path_resolver)
def descend_resolver(self, current_node, current_index):
@@ -117,16 +117,18 @@ def ascend_resolver(self):
def check_resolver_prefix(self, depth, path, kind,
current_node, current_index):
node_check, index_check = path[depth-1]
- if isinstance(node_check, basestring):
- if current_node.tag != node_check:
- return
- elif node_check is not None:
- if not isinstance(current_node, node_check):
- return
+ if (
+ isinstance(node_check, basestring)
+ and current_node.tag != node_check
+ or not isinstance(node_check, basestring)
+ and node_check is not None
+ and not isinstance(current_node, node_check)
+ ):
+ return
if index_check is True and current_index is not None:
return
if (index_check is False or index_check is None) \
- and current_index is None:
+ and current_index is None:
return
if isinstance(index_check, basestring):
if not (isinstance(current_index, ScalarNode)
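The resolver (like the representer above it) defines its registry helpers with the pre-decorator `name = classmethod(name)` spelling, so renaming the first parameter from `cls` to `self` does not change behavior: classmethod still passes the class, whatever the parameter is called. A small sketch of the copy-on-first-write registry idiom these methods use; Registry and add_handler are illustrative names, not PyYAML API:

class Registry:
    handlers = {}

    def add_handler(self, key, fn):
        # 'self' actually receives the class here; copy the inherited dict once,
        # then register on this class only (copy-on-write per subclass)
        if 'handlers' not in self.__dict__:
            self.handlers = self.handlers.copy()
        self.handlers[key] = fn
    add_handler = classmethod(add_handler)   # pre-@classmethod spelling

class Child(Registry):
    pass

Child.add_handler('x', print)
print('x' in Child.handlers, 'x' in Registry.handlers)   # True False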
diff --git a/yaml/yaml2/scanner.py b/yaml/yaml2/scanner.py
index 5228fad6..e4ea61c8 100644
--- a/yaml/yaml2/scanner.py
+++ b/yaml/yaml2/scanner.py
@@ -498,11 +498,6 @@ def fetch_block_entry(self):
mark = self.get_mark()
self.tokens.append(BlockSequenceStartToken(mark, mark))
- # It's an error for the block entry to occur in the flow context,
- # but we let the parser detect this.
- else:
- pass
-
# Simple keys are allowed after '-'.
self.allow_simple_key = True
@@ -712,22 +707,20 @@ def check_block_entry(self):
def check_key(self):
# KEY(flow context): '?'
- if self.flow_level:
- return True
-
- # KEY(block context): '?' (' '|'\n')
- else:
- return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+ return (
+ True
+ if self.flow_level
+ else self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+ )
def check_value(self):
# VALUE(flow context): ':'
- if self.flow_level:
- return True
-
- # VALUE(block context): ':' (' '|'\n')
- else:
- return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+ return (
+ True
+ if self.flow_level
+ else self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029'
+ )
def check_plain(self):
@@ -913,28 +906,33 @@ def scan_anchor(self, TokenClass):
# Therefore we restrict aliases to numbers and ASCII letters.
start_mark = self.get_mark()
indicator = self.peek()
- if indicator == u'*':
- name = 'alias'
- else:
- name = 'anchor'
+ name = 'alias' if indicator == u'*' else 'anchor'
self.forward()
length = 0
ch = self.peek(length)
while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
- or ch in u'-_':
+ or ch in u'-_':
length += 1
ch = self.peek(length)
if not length:
- raise ScannerError("while scanning an %s" % name, start_mark,
- "expected alphabetic or numeric character, but found %r"
- % ch.encode('utf-8'), self.get_mark())
+ raise ScannerError(
+ f"while scanning an {name}",
+ start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'),
+ self.get_mark(),
+ )
value = self.prefix(length)
self.forward(length)
ch = self.peek()
if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
- raise ScannerError("while scanning an %s" % name, start_mark,
- "expected alphabetic or numeric character, but found %r"
- % ch.encode('utf-8'), self.get_mark())
+ raise ScannerError(
+ f"while scanning an {name}",
+ start_mark,
+ "expected alphabetic or numeric character, but found %r"
+ % ch.encode('utf-8'),
+ self.get_mark(),
+ )
end_mark = self.get_mark()
return TokenClass(value, start_mark, end_mark)
@@ -983,11 +981,7 @@ def scan_tag(self):
def scan_block_scalar(self, style):
# See the specification for details.
- if style == '>':
- folded = True
- else:
- folded = False
-
+ folded = style == '>'
chunks = []
start_mark = self.get_mark()
@@ -998,8 +992,7 @@ def scan_block_scalar(self, style):
# Determine the indentation level and go to the first non-empty line.
min_indent = self.indent+1
- if min_indent < 1:
- min_indent = 1
+ min_indent = max(min_indent, 1)
if increment is None:
breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
indent = max(min_indent, max_indent)
@@ -1019,32 +1012,19 @@ def scan_block_scalar(self, style):
self.forward(length)
line_break = self.scan_line_break()
breaks, end_mark = self.scan_block_scalar_breaks(indent)
- if self.column == indent and self.peek() != u'\0':
+ if self.column != indent or self.peek() == u'\0':
+ break
+
+ # Unfortunately, folding rules are ambiguous.
+ #
+ # This is the folding according to the specification:
- # Unfortunately, folding rules are ambiguous.
- #
- # This is the folding according to the specification:
-
- if folded and line_break == u'\n' \
+ if folded and line_break == u'\n' \
and leading_non_space and self.peek() not in u' \t':
- if not breaks:
- chunks.append(u' ')
- else:
- chunks.append(line_break)
-
- # This is Clark Evans's interpretation (also in the spec
- # examples):
- #
- #if folded and line_break == u'\n':
- # if not breaks:
- # if self.peek() not in ' \t':
- # chunks.append(u' ')
- # else:
- # chunks.append(line_break)
- #else:
- # chunks.append(line_break)
+ if not breaks:
+ chunks.append(u' ')
else:
- break
+ chunks.append(line_break)
# Chomp the tail.
if chomping is not False:
@@ -1062,10 +1042,7 @@ def scan_block_scalar_indicators(self, start_mark):
increment = None
ch = self.peek()
if ch in u'+-':
- if ch == '+':
- chomping = True
- else:
- chomping = False
+ chomping = ch == '+'
self.forward()
ch = self.peek()
if ch in u'0123456789':
@@ -1084,10 +1061,7 @@ def scan_block_scalar_indicators(self, start_mark):
self.forward()
ch = self.peek()
if ch in u'+-':
- if ch == '+':
- chomping = True
- else:
- chomping = False
+ chomping = ch == '+'
self.forward()
ch = self.peek()
if ch not in u'\0 \r\n\x85\u2028\u2029':
@@ -1145,10 +1119,7 @@ def scan_flow_scalar(self, style):
# mark the beginning and the end of them. Therefore we are less
# restrictive than the specification requires. We only need to check
# that document separators are not included in scalars.
- if style == '"':
- double = True
- else:
- double = False
+ double = style == '"'
chunks = []
start_mark = self.get_mark()
quote = self.peek()
@@ -1262,8 +1233,10 @@ def scan_flow_scalar_breaks(self, double, start_mark):
# Instead of checking indentation, we check for document
# separators.
prefix = self.prefix(3)
- if (prefix == u'---' or prefix == u'...') \
- and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ if (
+ prefix in [u'---', u'...']
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029'
+ ):
raise ScannerError("while scanning a quoted scalar", start_mark,
"found unexpected document separator", self.get_mark())
while self.peek() in u' \t':
@@ -1335,8 +1308,10 @@ def scan_plain_spaces(self, indent, start_mark):
line_break = self.scan_line_break()
self.allow_simple_key = True
prefix = self.prefix(3)
- if (prefix == u'---' or prefix == u'...') \
- and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ if (
+ prefix in [u'---', u'...']
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029'
+ ):
return
breaks = []
while self.peek() in u' \r\n\x85\u2028\u2029':
@@ -1345,8 +1320,10 @@ def scan_plain_spaces(self, indent, start_mark):
else:
breaks.append(self.scan_line_break())
prefix = self.prefix(3)
- if (prefix == u'---' or prefix == u'...') \
- and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029':
+ if (
+ prefix in [u'---', u'...']
+ and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029'
+ ):
return
if line_break != u'\n':
chunks.append(line_break)
@@ -1363,21 +1340,27 @@ def scan_tag_handle(self, name, start_mark):
# tag handles. I have allowed it anyway.
ch = self.peek()
if ch != u'!':
- raise ScannerError("while scanning a %s" % name, start_mark,
- "expected '!', but found %r" % ch.encode('utf-8'),
- self.get_mark())
+ raise ScannerError(
+ f"while scanning a {name}",
+ start_mark,
+ "expected '!', but found %r" % ch.encode('utf-8'),
+ self.get_mark(),
+ )
length = 1
ch = self.peek(length)
if ch != u' ':
while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
- or ch in u'-_':
+ or ch in u'-_':
length += 1
ch = self.peek(length)
if ch != u'!':
self.forward(length)
- raise ScannerError("while scanning a %s" % name, start_mark,
- "expected '!', but found %r" % ch.encode('utf-8'),
- self.get_mark())
+ raise ScannerError(
+ f"while scanning a {name}",
+ start_mark,
+ "expected '!', but found %r" % ch.encode('utf-8'),
+ self.get_mark(),
+ )
length += 1
value = self.prefix(length)
self.forward(length)
@@ -1390,7 +1373,7 @@ def scan_tag_uri(self, name, start_mark):
length = 0
ch = self.peek(length)
while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \
- or ch in u'-;/?:@&=+$,_.!~*\'()[]%':
+ or ch in u'-;/?:@&=+$,_.!~*\'()[]%':
if ch == u'%':
chunks.append(self.prefix(length))
self.forward(length)
@@ -1404,9 +1387,12 @@ def scan_tag_uri(self, name, start_mark):
self.forward(length)
length = 0
if not chunks:
- raise ScannerError("while parsing a %s" % name, start_mark,
- "expected URI, but found %r" % ch.encode('utf-8'),
- self.get_mark())
+ raise ScannerError(
+ f"while parsing a {name}",
+ start_mark,
+ "expected URI, but found %r" % ch.encode('utf-8'),
+ self.get_mark(),
+ )
return u''.join(chunks)
def scan_uri_escapes(self, name, start_mark):
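The largest scanner change inverts the loop condition in scan_block_scalar and breaks early, so the folding logic that follows sits one indentation level flatter; the boolean assignments (`folded = style == '>'`, `chomping = ch == '+'`) are the same flattening in miniature. A standalone sketch of the invert-and-break shape; take_aligned and its arguments are illustrative, not PyYAML API:

def take_aligned(lines, indent):
    taken = []
    for line in lines:
        # original shape: if line is aligned and non-empty: <work> ... else: break
        # rewritten shape: bail out first, keep the work unindented
        if len(line) - len(line.lstrip(' ')) != indent or not line.strip():
            break
        taken.append(line.strip())
    return taken

print(take_aligned(['  a', '  b', ' c', '  d'], 2))   # ['a', 'b']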
diff --git a/yaml/yaml2/serializer.py b/yaml/yaml2/serializer.py
index 0bf1e96d..2ea6d17b 100644
--- a/yaml/yaml2/serializer.py
+++ b/yaml/yaml2/serializer.py
@@ -93,10 +93,8 @@ def serialize_node(self, node, parent, index):
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
- index = 0
- for item in node.value:
+ for index, item in enumerate(node.value):
self.serialize_node(item, node, index)
- index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
diff --git a/yaml/yaml2/tokens.py b/yaml/yaml2/tokens.py
index 4d0b48a3..5569de8e 100644
--- a/yaml/yaml2/tokens.py
+++ b/yaml/yaml2/tokens.py
@@ -9,7 +9,7 @@ def __repr__(self):
attributes.sort()
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
- return '%s(%s)' % (self.__class__.__name__, arguments)
+ return f'{self.__class__.__name__}({arguments})'
#class BOMToken(Token):
# id = ''
diff --git a/yaml/yaml3/__init__.py b/yaml/yaml3/__init__.py
index a5e20f94..05d8bd12 100644
--- a/yaml/yaml3/__init__.py
+++ b/yaml/yaml3/__init__.py
@@ -133,10 +133,7 @@ def serialize_all(nodes, stream=None, Dumper=Dumper,
"""
getvalue = None
if stream is None:
- if encoding is None:
- stream = io.StringIO()
- else:
- stream = io.BytesIO()
+ stream = io.StringIO() if encoding is None else io.BytesIO()
getvalue = stream.getvalue
dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break,
@@ -171,10 +168,7 @@ def dump_all(documents, stream=None, Dumper=Dumper,
"""
getvalue = None
if stream is None:
- if encoding is None:
- stream = io.StringIO()
- else:
- stream = io.BytesIO()
+ stream = io.StringIO() if encoding is None else io.BytesIO()
getvalue = stream.getvalue
dumper = Dumper(stream, default_style=default_style,
default_flow_style=default_flow_style,
@@ -275,11 +269,11 @@ class YAMLObjectMetaclass(type):
"""
The metaclass for YAMLObject.
"""
- def __init__(cls, name, bases, kwds):
- super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ def __init__(self, name, bases, kwds):
+ super(YAMLObjectMetaclass, self).__init__(name, bases, kwds)
if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
- cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
- cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+ self.yaml_loader.add_constructor(self.yaml_tag, self.from_yaml)
+ self.yaml_dumper.add_representer(self, self.to_yaml)
class YAMLObject(metaclass=YAMLObjectMetaclass):
"""
diff --git a/yaml/yaml3/constructor.py b/yaml/yaml3/constructor.py
index 38109fc9..8367d621 100644
--- a/yaml/yaml3/constructor.py
+++ b/yaml/yaml3/constructor.py
@@ -38,18 +38,16 @@ def get_data(self):
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
- if node is not None:
- return self.construct_document(node)
- return None
+ return self.construct_document(node) if node is not None else None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
    for dummy in generator:
        pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
@@ -94,10 +89,7 @@ def construct_object(self, node, deep=False):
if isinstance(data, types.GeneratorType):
generator = data
data = next(generator)
- if self.deep_construct:
- for dummy in generator:
- pass
- else:
+ if not self.deep_construct:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
@@ -107,24 +99,33 @@ def construct_object(self, node, deep=False):
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
- raise ConstructorError(None, None,
- "expected a scalar node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a scalar node, but found {node.id}",
+ node.start_mark,
+ )
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
- raise ConstructorError(None, None,
- "expected a sequence node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a sequence node, but found {node.id}",
+ node.start_mark,
+ )
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
- raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a mapping node, but found {node.id}",
+ node.start_mark,
+ )
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
@@ -137,9 +138,12 @@ def construct_mapping(self, node, deep=False):
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
- raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"expected a mapping node, but found {node.id}",
+ node.start_mark,
+ )
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
@@ -149,13 +153,13 @@ def construct_pairs(self, node, deep=False):
@classmethod
def add_constructor(cls, tag, constructor):
- if not 'yaml_constructors' in cls.__dict__:
+ if 'yaml_constructors' not in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
@classmethod
def add_multi_constructor(cls, tag_prefix, multi_constructor):
- if not 'yaml_multi_constructors' in cls.__dict__:
+ if 'yaml_multi_constructors' not in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
@@ -182,19 +186,24 @@ def flatten_mapping(self, node):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing a mapping",
- node.start_mark,
- "expected a mapping for merging, but found %s"
- % subnode.id, subnode.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping",
+ node.start_mark,
+ f"expected a mapping for merging, but found {subnode.id}",
+ subnode.start_mark,
+ )
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "expected a mapping or list of mappings for merging, but found %s"
- % value_node.id, value_node.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping",
+ node.start_mark,
+ f"expected a mapping or list of mappings for merging, but found {value_node.id}",
+ value_node.start_mark,
+ )
elif key_node.tag == 'tag:yaml.org,2002:value':
key_node.tag = 'tag:yaml.org,2002:str'
index += 1
@@ -228,9 +237,7 @@ def construct_yaml_bool(self, node):
def construct_yaml_int(self, node):
value = self.construct_scalar(node)
value = value.replace('_', '')
- sign = +1
- if value[0] == '-':
- sign = -1
+ sign = -1 if value[0] == '-' else +1
if value[0] in '+-':
value = value[1:]
if value == '0':
@@ -261,9 +268,7 @@ def construct_yaml_int(self, node):
def construct_yaml_float(self, node):
value = self.construct_scalar(node)
value = value.replace('_', '').lower()
- sign = +1
- if value[0] == '-':
- sign = -1
+ sign = -1 if value[0] == '-' else +1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
@@ -286,17 +291,21 @@ def construct_yaml_binary(self, node):
try:
value = self.construct_scalar(node).encode('ascii')
except UnicodeEncodeError as exc:
- raise ConstructorError(None, None,
- "failed to convert base64 data into ascii: %s" % exc,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"failed to convert base64 data into ascii: {exc}",
+ node.start_mark,
+ )
try:
if hasattr(base64, 'decodebytes'):
return base64.decodebytes(value)
else:
return base64.decodestring(value)
except binascii.Error as exc:
- raise ConstructorError(None, None,
- "failed to decode base64 data: %s" % exc, node.start_mark)
+ raise ConstructorError(
+ None, None, f"failed to decode base64 data: {exc}", node.start_mark
+ )
timestamp_regexp = re.compile(
r'''^(?P<year>[0-9][0-9][0-9][0-9])
@@ -346,13 +355,20 @@ def construct_yaml_omap(self, node):
omap = []
yield omap
if not isinstance(node, SequenceNode):
- raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a sequence, but found %s" % node.id, node.start_mark)
+ raise ConstructorError(
+ "while constructing an ordered map",
+ node.start_mark,
+ f"expected a sequence, but found {node.id}",
+ node.start_mark,
+ )
for subnode in node.value:
if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing an ordered map", node.start_mark,
- "expected a mapping of length 1, but found %s" % subnode.id,
- subnode.start_mark)
+ raise ConstructorError(
+ "while constructing an ordered map",
+ node.start_mark,
+ f"expected a mapping of length 1, but found {subnode.id}",
+ subnode.start_mark,
+ )
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
@@ -367,13 +383,20 @@ def construct_yaml_pairs(self, node):
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
- raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a sequence, but found %s" % node.id, node.start_mark)
+ raise ConstructorError(
+ "while constructing pairs",
+ node.start_mark,
+ f"expected a sequence, but found {node.id}",
+ node.start_mark,
+ )
for subnode in node.value:
if not isinstance(subnode, MappingNode):
- raise ConstructorError("while constructing pairs", node.start_mark,
- "expected a mapping of length 1, but found %s" % subnode.id,
- subnode.start_mark)
+ raise ConstructorError(
+ "while constructing pairs",
+ node.start_mark,
+ f"expected a mapping of length 1, but found {subnode.id}",
+ subnode.start_mark,
+ )
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
@@ -481,17 +504,21 @@ def construct_python_bytes(self, node):
try:
value = self.construct_scalar(node).encode('ascii')
except UnicodeEncodeError as exc:
- raise ConstructorError(None, None,
- "failed to convert base64 data into ascii: %s" % exc,
- node.start_mark)
+ raise ConstructorError(
+ None,
+ None,
+ f"failed to convert base64 data into ascii: {exc}",
+ node.start_mark,
+ )
try:
if hasattr(base64, 'decodebytes'):
return base64.decodebytes(value)
else:
return base64.decodestring(value)
except binascii.Error as exc:
- raise ConstructorError(None, None,
- "failed to decode base64 data: %s" % exc, node.start_mark)
+ raise ConstructorError(
+ None, None, f"failed to decode base64 data: {exc}", node.start_mark
+ )
def construct_python_long(self, node):
return self.construct_yaml_int(node)
@@ -535,15 +562,13 @@ def find_python_name(self, name, mark):
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
- value = self.construct_scalar(node)
- if value:
+ if value := self.construct_scalar(node):
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value, node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
- value = self.construct_scalar(node)
- if value:
+ if value := self.construct_scalar(node):
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value, node.start_mark)
return self.find_python_module(suffix, node.start_mark)
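construct_document depends on draining the generators queued in state_generators: constructors such as construct_yaml_omap and construct_yaml_pairs yield the still-empty container first and only populate it when iterated to completion, which is what the for-loops over state_generators are for. A standalone sketch of that two-phase pattern, not PyYAML code:

def build_pairs(out):
    # phase 1: publish the empty result so references to it can be resolved
    yield out
    # phase 2: runs only when the generator is iterated to completion
    out.extend([('a', 1), ('b', 2)])

pairs = []
gen = build_pairs(pairs)
next(gen)           # phase 1 only
print(pairs)        # [] -- nothing filled in yet
for _ in gen:       # the drain step construct_document performs
    pass
print(pairs)        # [('a', 1), ('b', 2)]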
diff --git a/yaml/yaml3/emitter.py b/yaml/yaml3/emitter.py
index 34cb145a..832dae77 100644
--- a/yaml/yaml3/emitter.py
+++ b/yaml/yaml3/emitter.py
@@ -146,10 +146,7 @@ def need_events(self, count):
def increase_indent(self, flow=False, indentless=False):
self.indents.append(self.indent)
if self.indent is None:
- if flow:
- self.indent = self.best_indent
- else:
- self.indent = 0
+ self.indent = self.best_indent if flow else 0
elif not indentless:
self.indent += self.best_indent
@@ -158,17 +155,15 @@ def increase_indent(self, flow=False, indentless=False):
# Stream handlers.
def expect_stream_start(self):
- if isinstance(self.event, StreamStartEvent):
- if self.event.encoding and not hasattr(self.stream, 'encoding'):
- self.encoding = self.event.encoding
- self.write_stream_start()
- self.state = self.expect_first_document_start
- else:
- raise EmitterError("expected StreamStartEvent, but got %s"
- % self.event)
+ if not isinstance(self.event, StreamStartEvent):
+ raise EmitterError(f"expected StreamStartEvent, but got {self.event}")
+ if self.event.encoding and not hasattr(self.stream, 'encoding'):
+ self.encoding = self.event.encoding
+ self.write_stream_start()
+ self.state = self.expect_first_document_start
def expect_nothing(self):
- raise EmitterError("expected nothing, but got %s" % self.event)
+ raise EmitterError(f"expected nothing, but got {self.event}")
# Document handlers.
@@ -208,20 +203,17 @@ def expect_document_start(self, first=False):
self.write_stream_end()
self.state = self.expect_nothing
else:
- raise EmitterError("expected DocumentStartEvent, but got %s"
- % self.event)
+ raise EmitterError(f"expected DocumentStartEvent, but got {self.event}")
def expect_document_end(self):
- if isinstance(self.event, DocumentEndEvent):
+ if not isinstance(self.event, DocumentEndEvent):
+ raise EmitterError(f"expected DocumentEndEvent, but got {self.event}")
+ self.write_indent()
+ if self.event.explicit:
+ self.write_indicator('...', True)
self.write_indent()
- if self.event.explicit:
- self.write_indicator('...', True)
- self.write_indent()
- self.flush_stream()
- self.state = self.expect_document_start
- else:
- raise EmitterError("expected DocumentEndEvent, but got %s"
- % self.event)
+ self.flush_stream()
+ self.state = self.expect_document_start
def expect_document_root(self):
self.states.append(self.expect_document_end)
@@ -244,18 +236,18 @@ def expect_node(self, root=False, sequence=False, mapping=False,
self.expect_scalar()
elif isinstance(self.event, SequenceStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
- or self.check_empty_sequence():
+ or self.check_empty_sequence():
self.expect_flow_sequence()
else:
self.expect_block_sequence()
elif isinstance(self.event, MappingStartEvent):
if self.flow_level or self.canonical or self.event.flow_style \
- or self.check_empty_mapping():
+ or self.check_empty_mapping():
self.expect_flow_mapping()
else:
self.expect_block_mapping()
else:
- raise EmitterError("expected NodeEvent, but got %s" % self.event)
+ raise EmitterError(f"expected NodeEvent, but got {self.event}")
def expect_alias(self):
if self.event.anchor is None:
@@ -479,10 +471,9 @@ def process_tag(self):
if self.event.implicit[0] and tag is None:
tag = '!'
self.prepared_tag = None
- else:
- if (not self.canonical or tag is None) and self.event.implicit:
- self.prepared_tag = None
- return
+ elif (not self.canonical or tag is None) and self.event.implicit:
+ self.prepared_tag = None
+ return
if tag is None:
raise EmitterError("tag is not specified")
if self.prepared_tag is None:
@@ -564,15 +555,14 @@ def prepare_tag_prefix(self, prefix):
while end < len(prefix):
ch = prefix[end]
if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
- or ch in '-;/?!:@&=+$,_.~*\'()[]':
+ or ch in '-;/?!:@&=+$,_.~*\'()[]':
end += 1
else:
if start < end:
chunks.append(prefix[start:end])
start = end = end+1
data = ch.encode('utf-8')
- for ch in data:
- chunks.append('%%%02X' % ord(ch))
+ chunks.extend('%%%02X' % ord(ch) for ch in data)
if start < end:
chunks.append(prefix[start:end])
return ''.join(chunks)
@@ -587,7 +577,7 @@ def prepare_tag(self, tag):
prefixes = sorted(self.tag_prefixes.keys())
for prefix in prefixes:
if tag.startswith(prefix) \
- and (prefix == '!' or len(prefix) < len(tag)):
+ and (prefix == '!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
suffix = tag[len(prefix):]
chunks = []
@@ -595,23 +585,19 @@ def prepare_tag(self, tag):
while end < len(suffix):
ch = suffix[end]
if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
- or ch in '-;/?:@&=+$,_.~*\'()[]' \
- or (ch == '!' and handle != '!'):
+ or ch in '-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == '!' and handle != '!'):
end += 1
else:
if start < end:
chunks.append(suffix[start:end])
start = end = end+1
data = ch.encode('utf-8')
- for ch in data:
- chunks.append('%%%02X' % ord(ch))
+ chunks.extend('%%%02X' % ord(ch) for ch in data)
if start < end:
chunks.append(suffix[start:end])
suffix_text = ''.join(chunks)
- if handle:
- return '%s%s' % (handle, suffix_text)
- else:
- return '!<%s>' % suffix_text
+ return f'{handle}{suffix_text}' if handle else f'!<{suffix_text}>'
def prepare_anchor(self, anchor):
if not anchor:
@@ -798,10 +784,7 @@ def write_stream_end(self):
def write_indicator(self, indicator, need_whitespace,
whitespace=False, indention=False):
- if self.whitespace or not need_whitespace:
- data = indicator
- else:
- data = ' '+indicator
+ data = indicator if self.whitespace or not need_whitespace else f' {indicator}'
self.whitespace = whitespace
self.indention = self.indention and indention
self.column += len(data)
@@ -862,7 +845,7 @@ def write_single_quoted(self, text, split=True):
if spaces:
if ch is None or ch != ' ':
if start+1 == end and self.column > self.best_width and split \
- and start != 0 and end != len(text):
+ and start != 0 and end != len(text):
self.write_indent()
else:
data = text[start:end]
@@ -882,15 +865,14 @@ def write_single_quoted(self, text, split=True):
self.write_line_break(br)
self.write_indent()
start = end
- else:
- if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'':
- if start < end:
- data = text[start:end]
- self.column += len(data)
- if self.encoding:
- data = data.encode(self.encoding)
- self.stream.write(data)
- start = end
+ elif ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'':
+ if start < end:
+ data = text[start:end]
+ self.column += len(data)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ start = end
if ch == '\'':
data = '\'\''
self.column += 2
diff --git a/yaml/yaml3/events.py b/yaml/yaml3/events.py
index f79ad389..3eb33010 100644
--- a/yaml/yaml3/events.py
+++ b/yaml/yaml3/events.py
@@ -10,7 +10,7 @@ def __repr__(self):
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
- return '%s(%s)' % (self.__class__.__name__, arguments)
+ return f'{self.__class__.__name__}({arguments})'
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
diff --git a/yaml/yaml3/parser.py b/yaml/yaml3/parser.py
index 13a5995d..34f5b32a 100644
--- a/yaml/yaml3/parser.py
+++ b/yaml/yaml3/parser.py
@@ -138,23 +138,20 @@ def parse_stream_start(self):
def parse_implicit_document_start(self):
- # Parse an implicit document.
- if not self.check_token(DirectiveToken, DocumentStartToken,
+ if self.check_token(DirectiveToken, DocumentStartToken,
StreamEndToken):
- self.tag_handles = self.DEFAULT_TAGS
- token = self.peek_token()
- start_mark = end_mark = token.start_mark
- event = DocumentStartEvent(start_mark, end_mark,
- explicit=False)
-
- # Prepare the next state.
- self.states.append(self.parse_document_end)
- self.state = self.parse_block_node
+ return self.parse_document_start()
+ self.tag_handles = self.DEFAULT_TAGS
+ token = self.peek_token()
+ start_mark = end_mark = token.start_mark
+ event = DocumentStartEvent(start_mark, end_mark,
+ explicit=False)
- return event
+ # Prepare the next state.
+ self.states.append(self.parse_document_end)
+ self.state = self.parse_block_node
- else:
- return self.parse_document_start()
+ return event
def parse_document_start(self):
@@ -206,13 +203,13 @@ def parse_document_end(self):
return event
def parse_document_content(self):
- if self.check_token(DirectiveToken,
- DocumentStartToken, DocumentEndToken, StreamEndToken):
- event = self.process_empty_scalar(self.peek_token().start_mark)
- self.state = self.states.pop()
- return event
- else:
+ if not self.check_token(
+ DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
+ ):
return self.parse_block_node()
+ event = self.process_empty_scalar(self.peek_token().start_mark)
+ self.state = self.states.pop()
+ return event
def process_directives(self):
self.yaml_version = None
@@ -300,14 +297,14 @@ def parse_node(self, block=False, indentless_sequence=False):
anchor = token.value
if tag is not None:
handle, suffix = tag
- if handle is not None:
- if handle not in self.tag_handles:
- raise ParserError("while parsing a node", start_mark,
- "found undefined tag handle %r" % handle,
- tag_mark)
- tag = self.tag_handles[handle]+suffix
- else:
+ if handle is None:
tag = suffix
+ elif handle not in self.tag_handles:
+ raise ParserError("while parsing a node", start_mark,
+ "found undefined tag handle %r" % handle,
+ tag_mark)
+ else:
+ tag = self.tag_handles[handle]+suffix
#if tag == '!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
@@ -321,54 +318,53 @@ def parse_node(self, block=False, indentless_sequence=False):
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark)
self.state = self.parse_indentless_sequence_entry
- else:
- if self.check_token(ScalarToken):
- token = self.get_token()
- end_mark = token.end_mark
- if (token.plain and tag is None) or tag == '!':
- implicit = (True, False)
- elif tag is None:
- implicit = (False, True)
- else:
- implicit = (False, False)
- event = ScalarEvent(anchor, tag, implicit, token.value,
- start_mark, end_mark, style=token.style)
- self.state = self.states.pop()
- elif self.check_token(FlowSequenceStartToken):
- end_mark = self.peek_token().end_mark
- event = SequenceStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=True)
- self.state = self.parse_flow_sequence_first_entry
- elif self.check_token(FlowMappingStartToken):
- end_mark = self.peek_token().end_mark
- event = MappingStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=True)
- self.state = self.parse_flow_mapping_first_key
- elif block and self.check_token(BlockSequenceStartToken):
- end_mark = self.peek_token().start_mark
- event = SequenceStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=False)
- self.state = self.parse_block_sequence_first_entry
- elif block and self.check_token(BlockMappingStartToken):
- end_mark = self.peek_token().start_mark
- event = MappingStartEvent(anchor, tag, implicit,
- start_mark, end_mark, flow_style=False)
- self.state = self.parse_block_mapping_first_key
- elif anchor is not None or tag is not None:
- # Empty scalars are allowed even if a tag or an anchor is
- # specified.
- event = ScalarEvent(anchor, tag, (implicit, False), '',
- start_mark, end_mark)
- self.state = self.states.pop()
+ elif self.check_token(ScalarToken):
+ token = self.get_token()
+ end_mark = token.end_mark
+ if (token.plain and tag is None) or tag == '!':
+ implicit = (True, False)
+ elif tag is None:
+ implicit = (False, True)
else:
- if block:
- node = 'block'
- else:
- node = 'flow'
- token = self.peek_token()
- raise ParserError("while parsing a %s node" % node, start_mark,
- "expected the node content, but found %r" % token.id,
- token.start_mark)
+ implicit = (False, False)
+ event = ScalarEvent(anchor, tag, implicit, token.value,
+ start_mark, end_mark, style=token.style)
+ self.state = self.states.pop()
+ elif self.check_token(FlowSequenceStartToken):
+ end_mark = self.peek_token().end_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_sequence_first_entry
+ elif self.check_token(FlowMappingStartToken):
+ end_mark = self.peek_token().end_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=True)
+ self.state = self.parse_flow_mapping_first_key
+ elif block and self.check_token(BlockSequenceStartToken):
+ end_mark = self.peek_token().start_mark
+ event = SequenceStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_sequence_first_entry
+ elif block and self.check_token(BlockMappingStartToken):
+ end_mark = self.peek_token().start_mark
+ event = MappingStartEvent(anchor, tag, implicit,
+ start_mark, end_mark, flow_style=False)
+ self.state = self.parse_block_mapping_first_key
+ elif anchor is not None or tag is not None:
+ # Empty scalars are allowed even if a tag or an anchor is
+ # specified.
+ event = ScalarEvent(anchor, tag, (implicit, False), '',
+ start_mark, end_mark)
+ self.state = self.states.pop()
+ else:
+ node = 'block' if block else 'flow'
+ token = self.peek_token()
+ raise ParserError(
+ f"while parsing a {node} node",
+ start_mark,
+ "expected the node content, but found %r" % token.id,
+ token.start_mark,
+ )
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
diff --git a/yaml/yaml3/reader.py b/yaml/yaml3/reader.py
index f70e920f..b4fa0b2c 100644
--- a/yaml/yaml3/reader.py
+++ b/yaml/yaml3/reader.py
@@ -136,8 +136,7 @@ def determine_encoding(self):
NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
- match = self.NON_PRINTABLE.search(data)
- if match:
+ if match := self.NON_PRINTABLE.search(data):
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
diff --git a/yaml/yaml3/representer.py b/yaml/yaml3/representer.py
index 67cd6fd2..98bad0d9 100644
--- a/yaml/yaml3/representer.py
+++ b/yaml/yaml3/representer.py
@@ -30,16 +30,10 @@ def represent(self, data):
self.alias_key = None
def represent_data(self, data):
- if self.ignore_aliases(data):
- self.alias_key = None
- else:
- self.alias_key = id(data)
+ self.alias_key = None if self.ignore_aliases(data) else id(data)
if self.alias_key is not None:
if self.alias_key in self.represented_objects:
- node = self.represented_objects[self.alias_key]
- #if node is None:
- # raise RepresenterError("recursive objects are not allowed: %r" % data)
- return node
+ return self.represented_objects[self.alias_key]
#self.represented_objects[alias_key] = None
self.object_keeper.append(data)
data_types = type(data).__mro__
@@ -63,13 +57,13 @@ def represent_data(self, data):
@classmethod
def add_representer(cls, data_type, representer):
- if not 'yaml_representers' in cls.__dict__:
+ if 'yaml_representers' not in cls.__dict__:
cls.yaml_representers = cls.yaml_representers.copy()
cls.yaml_representers[data_type] = representer
@classmethod
def add_multi_representer(cls, data_type, representer):
- if not 'yaml_multi_representers' in cls.__dict__:
+ if 'yaml_multi_representers' not in cls.__dict__:
cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
cls.yaml_multi_representers[data_type] = representer
@@ -89,7 +83,7 @@ def represent_sequence(self, tag, sequence, flow_style=None):
best_style = True
for item in sequence:
node_item = self.represent_data(item)
- if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ if not isinstance(node_item, ScalarNode) or node_item.style:
best_style = False
value.append(node_item)
if flow_style is None:
@@ -114,9 +108,9 @@ def represent_mapping(self, tag, mapping, flow_style=None):
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
- if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ if not isinstance(node_key, ScalarNode) or node_key.style:
best_style = False
- if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ if not isinstance(node_value, ScalarNode) or node_value.style:
best_style = False
value.append((node_key, node_value))
if flow_style is None:
@@ -151,10 +145,7 @@ def represent_binary(self, data):
return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
def represent_bool(self, data):
- if data:
- value = 'true'
- else:
- value = 'false'
+ value = 'true' if data else 'false'
return self.represent_scalar('tag:yaml.org,2002:bool', value)
def represent_int(self, data):
@@ -165,7 +156,7 @@ def represent_int(self, data):
inf_value *= inf_value
def represent_float(self, data):
- if data != data or (data == 0.0 and data == 1.0):
+ if data != data:
value = '.nan'
elif data == self.inf_value:
value = '.inf'
@@ -203,9 +194,7 @@ def represent_dict(self, data):
return self.represent_mapping('tag:yaml.org,2002:map', data)
def represent_set(self, data):
- value = {}
- for key in data:
- value[key] = None
+ value = {key: None for key in data}
return self.represent_mapping('tag:yaml.org,2002:set', value)
def represent_date(self, data):
@@ -224,7 +213,7 @@ def represent_yaml_object(self, tag, data, cls, flow_style=None):
return self.represent_mapping(tag, state, flow_style=flow_style)
def represent_undefined(self, data):
- raise RepresenterError("cannot represent an object: %s" % data)
+ raise RepresenterError(f"cannot represent an object: {data}")
SafeRepresenter.add_representer(type(None),
SafeRepresenter.represent_none)
@@ -282,12 +271,13 @@ def represent_tuple(self, data):
return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
def represent_name(self, data):
- name = '%s.%s' % (data.__module__, data.__name__)
- return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '')
+ name = f'{data.__module__}.{data.__name__}'
+ return self.represent_scalar(f'tag:yaml.org,2002:python/name:{name}', '')
def represent_module(self, data):
return self.represent_scalar(
- 'tag:yaml.org,2002:python/module:'+data.__name__, '')
+ f'tag:yaml.org,2002:python/module:{data.__name__}', ''
+ )
def represent_object(self, data):
# We use __reduce__ API to save the data. data.__reduce__ returns
@@ -332,13 +322,14 @@ def represent_object(self, data):
else:
tag = 'tag:yaml.org,2002:python/object/apply:'
newobj = False
- function_name = '%s.%s' % (function.__module__, function.__name__)
+ function_name = f'{function.__module__}.{function.__name__}'
if not args and not listitems and not dictitems \
- and isinstance(state, dict) and newobj:
+ and isinstance(state, dict) and newobj:
return self.represent_mapping(
- 'tag:yaml.org,2002:python/object:'+function_name, state)
+ f'tag:yaml.org,2002:python/object:{function_name}', state
+ )
if not listitems and not dictitems \
- and isinstance(state, dict) and not state:
+ and isinstance(state, dict) and not state:
return self.represent_sequence(tag+function_name, args)
value = {}
if args:
diff --git a/yaml/yaml3/resolver.py b/yaml/yaml3/resolver.py
index 0eece258..f735d201 100644
--- a/yaml/yaml3/resolver.py
+++ b/yaml/yaml3/resolver.py
@@ -24,7 +24,7 @@ def __init__(self):
@classmethod
def add_implicit_resolver(cls, tag, regexp, first):
- if not 'yaml_implicit_resolvers' in cls.__dict__:
+ if 'yaml_implicit_resolvers' not in cls.__dict__:
cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
if first is None:
first = [None]
@@ -45,7 +45,7 @@ def add_path_resolver(cls, tag, path, kind=None):
# a mapping value that corresponds to a scalar key which content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
- if not 'yaml_path_resolvers' in cls.__dict__:
+ if 'yaml_path_resolvers' not in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
new_path = []
for element in path:
@@ -56,7 +56,7 @@ def add_path_resolver(cls, tag, path, kind=None):
node_check = element[0]
index_check = True
else:
- raise ResolverError("Invalid path element: %s" % element)
+ raise ResolverError(f"Invalid path element: {element}")
else:
node_check = None
index_check = element
@@ -67,12 +67,12 @@ def add_path_resolver(cls, tag, path, kind=None):
elif node_check is dict:
node_check = MappingNode
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
- and not isinstance(node_check, str) \
- and node_check is not None:
- raise ResolverError("Invalid node checker: %s" % node_check)
+ and not isinstance(node_check, str) \
+ and node_check is not None:
+ raise ResolverError(f"Invalid node checker: {node_check}")
if not isinstance(index_check, (str, int)) \
- and index_check is not None:
- raise ResolverError("Invalid index checker: %s" % index_check)
+ and index_check is not None:
+ raise ResolverError(f"Invalid index checker: {index_check}")
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
@@ -81,8 +81,8 @@ def add_path_resolver(cls, tag, path, kind=None):
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
- and kind is not None:
- raise ResolverError("Invalid node kind: %s" % kind)
+ and kind is not None:
+ raise ResolverError(f"Invalid node kind: {kind}")
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
def descend_resolver(self, current_node, current_index):
@@ -117,16 +117,18 @@ def ascend_resolver(self):
def check_resolver_prefix(self, depth, path, kind,
current_node, current_index):
node_check, index_check = path[depth-1]
- if isinstance(node_check, str):
- if current_node.tag != node_check:
- return
- elif node_check is not None:
- if not isinstance(current_node, node_check):
- return
+ if (
+ isinstance(node_check, str)
+ and current_node.tag != node_check
+ or not isinstance(node_check, str)
+ and node_check is not None
+ and not isinstance(current_node, node_check)
+ ):
+ return
if index_check is True and current_index is not None:
return
if (index_check is False or index_check is None) \
- and current_index is None:
+ and current_index is None:
return
if isinstance(index_check, str):
if not (isinstance(current_index, ScalarNode)
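The check_resolver_prefix hunk above folds a nested if/elif pair into a single boolean guard. The two forms are equivalent, but this kind of De Morgan-style rewrite is easy to get subtly wrong, so here is a self-contained sketch that exercises both spellings side by side (FakeNode and the sample checks are invented for illustration, not the real node classes):

    class FakeNode:
        def __init__(self, tag):
            self.tag = tag

    def bails_nested(node_check, current_node):
        # original structure: nested if/elif
        if isinstance(node_check, str):
            if current_node.tag != node_check:
                return True
        elif node_check is not None:
            if not isinstance(current_node, node_check):
                return True
        return False

    def bails_collapsed(node_check, current_node):
        # collapsed structure from the diff
        return bool(
            isinstance(node_check, str) and current_node.tag != node_check
            or not isinstance(node_check, str)
            and node_check is not None
            and not isinstance(current_node, node_check)
        )

    node = FakeNode('!foo')
    for check in ('!foo', '!bar', FakeNode, int, None):
        assert bails_nested(check, node) == bails_collapsed(check, node)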
diff --git a/yaml/yaml3/scanner.py b/yaml/yaml3/scanner.py
index 494d975b..cac5928c 100644
--- a/yaml/yaml3/scanner.py
+++ b/yaml/yaml3/scanner.py
@@ -498,11 +498,6 @@ def fetch_block_entry(self):
mark = self.get_mark()
self.tokens.append(BlockSequenceStartToken(mark, mark))
- # It's an error for the block entry to occur in the flow context,
- # but we let the parser detect this.
- else:
- pass
-
# Simple keys are allowed after '-'.
self.allow_simple_key = True
@@ -712,22 +707,12 @@ def check_block_entry(self):
def check_key(self):
# KEY(flow context): '?'
- if self.flow_level:
- return True
-
- # KEY(block context): '?' (' '|'\n')
- else:
- return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+ return True if self.flow_level else self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
def check_value(self):
# VALUE(flow context): ':'
- if self.flow_level:
- return True
-
- # VALUE(block context): ':' (' '|'\n')
- else:
- return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+ return True if self.flow_level else self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
def check_plain(self):
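In the check_key and check_value hunks just above, the conditional expression `True if self.flow_level else ...` reproduces the removed if/else exactly. If a further cleanup were ever wanted (it is not part of this patch), `bool(...) or ...` has the same truth table; a quick standalone check using stand-in values rather than the scanner's real state:

    BREAK_CHARS = '\0 \t\r\n\x85\u2028\u2029'

    def check_key_ternary(flow_level, next_ch):
        return True if flow_level else next_ch in BREAK_CHARS

    def check_key_or(flow_level, next_ch):
        # alternative spelling; identical results
        return bool(flow_level) or next_ch in BREAK_CHARS

    for flow_level in (0, 1, 3):
        for next_ch in (' ', 'a', '\n'):
            assert check_key_ternary(flow_level, next_ch) == check_key_or(flow_level, next_ch)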
@@ -908,28 +893,31 @@ def scan_anchor(self, TokenClass):
# Therefore we restrict aliases to numbers and ASCII letters.
start_mark = self.get_mark()
indicator = self.peek()
- if indicator == '*':
- name = 'alias'
- else:
- name = 'anchor'
+ name = 'alias' if indicator == '*' else 'anchor'
self.forward()
length = 0
ch = self.peek(length)
while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
- or ch in '-_':
+ or ch in '-_':
length += 1
ch = self.peek(length)
if not length:
- raise ScannerError("while scanning an %s" % name, start_mark,
- "expected alphabetic or numeric character, but found %r"
- % ch, self.get_mark())
+ raise ScannerError(
+ f"while scanning an {name}",
+ start_mark,
+ "expected alphabetic or numeric character, but found %r" % ch,
+ self.get_mark(),
+ )
value = self.prefix(length)
self.forward(length)
ch = self.peek()
if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
- raise ScannerError("while scanning an %s" % name, start_mark,
- "expected alphabetic or numeric character, but found %r"
- % ch, self.get_mark())
+ raise ScannerError(
+ f"while scanning an {name}",
+ start_mark,
+ "expected alphabetic or numeric character, but found %r" % ch,
+ self.get_mark(),
+ )
end_mark = self.get_mark()
return TokenClass(value, start_mark, end_mark)
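A side note on the scan_anchor hunk: the first ScannerError argument is now an f-string while the detail message keeps '%' formatting. Both work; if a full conversion were preferred, f-strings offer the same repr output through the '!r' conversion. A two-line illustration (the variable is a stand-in for the scanned character):

    ch = '\t'
    old_style = "expected alphabetic or numeric character, but found %r" % ch
    new_style = f"expected alphabetic or numeric character, but found {ch!r}"
    assert old_style == new_style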
@@ -977,11 +965,7 @@ def scan_tag(self):
def scan_block_scalar(self, style):
# See the specification for details.
- if style == '>':
- folded = True
- else:
- folded = False
-
+ folded = style == '>'
chunks = []
start_mark = self.get_mark()
@@ -992,8 +976,7 @@ def scan_block_scalar(self, style):
# Determine the indentation level and go to the first non-empty line.
min_indent = self.indent+1
- if min_indent < 1:
- min_indent = 1
+ min_indent = max(min_indent, 1)
if increment is None:
breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
indent = max(min_indent, max_indent)
@@ -1013,32 +996,19 @@ def scan_block_scalar(self, style):
self.forward(length)
line_break = self.scan_line_break()
breaks, end_mark = self.scan_block_scalar_breaks(indent)
- if self.column == indent and self.peek() != '\0':
+ if self.column != indent or self.peek() == '\0':
+ break
+
+ # Unfortunately, folding rules are ambiguous.
+ #
+ # This is the folding according to the specification:
- # Unfortunately, folding rules are ambiguous.
- #
- # This is the folding according to the specification:
-
- if folded and line_break == '\n' \
+ if folded and line_break == '\n' \
and leading_non_space and self.peek() not in ' \t':
- if not breaks:
- chunks.append(' ')
- else:
- chunks.append(line_break)
-
- # This is Clark Evans's interpretation (also in the spec
- # examples):
- #
- #if folded and line_break == '\n':
- # if not breaks:
- # if self.peek() not in ' \t':
- # chunks.append(' ')
- # else:
- # chunks.append(line_break)
- #else:
- # chunks.append(line_break)
+ if not breaks:
+ chunks.append(' ')
else:
- break
+ chunks.append(line_break)
# Chomp the tail.
if chomping is not False:
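The scan_block_scalar hunk above turns the `if ...: <body> else: break` shape into an early break, negating the condition with De Morgan's law (`column == indent and peek != '\0'` becomes `column != indent or peek == '\0'`). A self-contained sketch of that inversion pattern; the loop, the data and the helper names are invented for illustration, not the scanner's real state:

    def consume_nested(items, indent):
        out = []
        for column, ch in items:
            if column == indent and ch != '\0':
                out.append(ch)
            else:
                break
        return out

    def consume_guarded(items, indent):
        out = []
        for column, ch in items:
            # inverted guard: not (column == indent and ch != '\0')
            if column != indent or ch == '\0':
                break
            out.append(ch)
        return out

    sample = [(2, 'a'), (2, 'b'), (3, 'c'), (2, 'd')]
    assert consume_nested(sample, 2) == consume_guarded(sample, 2) == ['a', 'b']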
@@ -1056,10 +1026,7 @@ def scan_block_scalar_indicators(self, start_mark):
increment = None
ch = self.peek()
if ch in '+-':
- if ch == '+':
- chomping = True
- else:
- chomping = False
+ chomping = ch == '+'
self.forward()
ch = self.peek()
if ch in '0123456789':
@@ -1078,10 +1045,7 @@ def scan_block_scalar_indicators(self, start_mark):
self.forward()
ch = self.peek()
if ch in '+-':
- if ch == '+':
- chomping = True
- else:
- chomping = False
+ chomping = ch == '+'
self.forward()
ch = self.peek()
if ch not in '\0 \r\n\x85\u2028\u2029':
@@ -1139,10 +1103,7 @@ def scan_flow_scalar(self, style):
# mark the beginning and the end of them. Therefore we are less
# restrictive then the specification requires. We only need to check
# that document separators are not included in scalars.
- if style == '"':
- double = True
- else:
- double = False
+ double = style == '"'
chunks = []
start_mark = self.get_mark()
quote = self.peek()
@@ -1256,8 +1217,10 @@ def scan_flow_scalar_breaks(self, double, start_mark):
# Instead of checking indentation, we check for document
# separators.
prefix = self.prefix(3)
- if (prefix == '---' or prefix == '...') \
- and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ if (
+ prefix in ['---', '...']
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029'
+ ):
raise ScannerError("while scanning a quoted scalar", start_mark,
"found unexpected document separator", self.get_mark())
while self.peek() in ' \t':
@@ -1329,8 +1292,10 @@ def scan_plain_spaces(self, indent, start_mark):
line_break = self.scan_line_break()
self.allow_simple_key = True
prefix = self.prefix(3)
- if (prefix == '---' or prefix == '...') \
- and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ if (
+ prefix in ['---', '...']
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029'
+ ):
return
breaks = []
while self.peek() in ' \r\n\x85\u2028\u2029':
@@ -1339,8 +1304,10 @@ def scan_plain_spaces(self, indent, start_mark):
else:
breaks.append(self.scan_line_break())
prefix = self.prefix(3)
- if (prefix == '---' or prefix == '...') \
- and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+ if (
+ prefix in ['---', '...']
+ and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029'
+ ):
return
if line_break != '\n':
chunks.append(line_break)
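The three document-separator hunks above (one in scan_flow_scalar_breaks, two in scan_plain_spaces) replace the chained comparisons with a list membership test; behavior is unchanged. For what it is worth, a tuple literal expresses the same test without building a list on each call, though for two short strings the difference is negligible:

    # All three spellings agree for any prefix value.
    for prefix in ('---', '...', '--x', 'abc'):
        as_list = prefix in ['---', '...']
        as_tuple = prefix in ('---', '...')
        chained = prefix == '---' or prefix == '...'
        assert as_list == as_tuple == chained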
@@ -1357,19 +1324,27 @@ def scan_tag_handle(self, name, start_mark):
# tag handles. I have allowed it anyway.
ch = self.peek()
if ch != '!':
- raise ScannerError("while scanning a %s" % name, start_mark,
- "expected '!', but found %r" % ch, self.get_mark())
+ raise ScannerError(
+ f"while scanning a {name}",
+ start_mark,
+ "expected '!', but found %r" % ch,
+ self.get_mark(),
+ )
length = 1
ch = self.peek(length)
if ch != ' ':
while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
- or ch in '-_':
+ or ch in '-_':
length += 1
ch = self.peek(length)
if ch != '!':
self.forward(length)
- raise ScannerError("while scanning a %s" % name, start_mark,
- "expected '!', but found %r" % ch, self.get_mark())
+ raise ScannerError(
+ f"while scanning a {name}",
+ start_mark,
+ "expected '!', but found %r" % ch,
+ self.get_mark(),
+ )
length += 1
value = self.prefix(length)
self.forward(length)
@@ -1382,7 +1357,7 @@ def scan_tag_uri(self, name, start_mark):
length = 0
ch = self.peek(length)
while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
- or ch in '-;/?:@&=+$,_.!~*\'()[]%':
+ or ch in '-;/?:@&=+$,_.!~*\'()[]%':
if ch == '%':
chunks.append(self.prefix(length))
self.forward(length)
@@ -1396,8 +1371,12 @@ def scan_tag_uri(self, name, start_mark):
self.forward(length)
length = 0
if not chunks:
- raise ScannerError("while parsing a %s" % name, start_mark,
- "expected URI, but found %r" % ch, self.get_mark())
+ raise ScannerError(
+ f"while parsing a {name}",
+ start_mark,
+ "expected URI, but found %r" % ch,
+ self.get_mark(),
+ )
return ''.join(chunks)
def scan_uri_escapes(self, name, start_mark):
@@ -1408,15 +1387,19 @@ def scan_uri_escapes(self, name, start_mark):
self.forward()
for k in range(2):
if self.peek(k) not in '0123456789ABCDEFabcdef':
- raise ScannerError("while scanning a %s" % name, start_mark,
- "expected URI escape sequence of 2 hexdecimal numbers, but found %r"
- % self.peek(k), self.get_mark())
+ raise ScannerError(
+ f"while scanning a {name}",
+ start_mark,
+ "expected URI escape sequence of 2 hexdecimal numbers, but found %r"
+ % self.peek(k),
+ self.get_mark(),
+ )
codes.append(int(self.prefix(2), 16))
self.forward(2)
try:
value = bytes(codes).decode('utf-8')
except UnicodeDecodeError as exc:
- raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+ raise ScannerError(f"while scanning a {name}", start_mark, str(exc), mark)
return value
def scan_line_break(self):
diff --git a/yaml/yaml3/serializer.py b/yaml/yaml3/serializer.py
index fe911e67..026c8755 100644
--- a/yaml/yaml3/serializer.py
+++ b/yaml/yaml3/serializer.py
@@ -93,10 +93,8 @@ def serialize_node(self, node, parent, index):
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
- index = 0
- for item in node.value:
+ for index, item in enumerate(node.value):
self.serialize_node(item, node, index)
- index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
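The serializer.py hunk drops the manual index counter in favor of enumerate, which keeps the index and the item in lockstep and removes the trailing `index += 1`. A tiny standalone sketch of the pattern (the list stands in for node.value):

    values = ['a', 'b', 'c']

    pairs_manual = []
    index = 0
    for item in values:
        pairs_manual.append((index, item))
        index += 1

    pairs_enumerate = list(enumerate(values))
    assert pairs_manual == pairs_enumerate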
diff --git a/yaml/yaml3/tokens.py b/yaml/yaml3/tokens.py
index 4d0b48a3..5569de8e 100644
--- a/yaml/yaml3/tokens.py
+++ b/yaml/yaml3/tokens.py
@@ -9,7 +9,7 @@ def __repr__(self):
attributes.sort()
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
- return '%s(%s)' % (self.__class__.__name__, arguments)
+ return f'{self.__class__.__name__}({arguments})'
#class BOMToken(Token):
# id = ''
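The final tokens.py hunk converts only the outer format call of Token.__repr__; the '%s=%r' join in the comprehension is left as is. If a full f-string conversion were wanted (outside the scope of this patch), the same '!r' conversion applies inside the join. A standalone sketch using an invented token-like class, not the real Token hierarchy:

    class DemoToken:
        id = '<demo>'

        def __init__(self, value, start_mark=None, end_mark=None):
            self.value = value
            self.start_mark = start_mark
            self.end_mark = end_mark

        def __repr__(self):
            attributes = [key for key in self.__dict__ if not key.endswith('_mark')]
            attributes.sort()
            arguments = ', '.join(f'{key}={getattr(self, key)!r}' for key in attributes)
            return f'{self.__class__.__name__}({arguments})'

    print(repr(DemoToken('anchor')))   # DemoToken(value='anchor')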