Mirror of https://github.com/alicevision/Meshroom.git, synced 2025-04-29 02:08:08 +02:00
Apply flynt to use f-strings
parent ee679fcf34
commit 51b04bc077
23 changed files with 141 additions and 142 deletions
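
All changes in this commit follow the same mechanical pattern: printf-style `%` formatting and `str.format()` calls are rewritten as equivalent f-strings by the flynt tool (typically run over the source tree, e.g. `flynt meshroom/`; the exact invocation is not recorded in the commit). A minimal sketch of the equivalence, with illustrative names that are not taken from the diff:

    # Three spellings of the same string; the f-string evaluates the expression in braces at runtime.
    signal = "progress"
    a = "Could not find signal matching %s" % signal          # printf-style (old)
    b = "Could not find signal matching {}".format(signal)    # str.format() (old)
    c = f"Could not find signal matching {signal}"            # f-string (new)
    assert a == b == c
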
@@ -86,7 +86,7 @@ class Signal(object):
         Connects the signal to any callable object
         """
         if not callable(slot):
-            raise ValueError("Connection to non-callable '%s' object failed" % slot.__class__.__name__)
+            raise ValueError(f"Connection to non-callable '{slot.__class__.__name__}' object failed")

         if isinstance(slot, (partial, Signal)) or '<' in slot.__name__:
             # If it's a partial, a Signal or a lambda. The '<' check is the only py2 and py3 compatible way I could find
@@ -199,7 +199,7 @@ class SignalFactory(dict):
         Emits a signal by name if it exists. Any additional args or kwargs are passed to the signal
         :param signalName: the signal name to emit
         """
-        assert signalName in self, "%s is not a registered signal" % signalName
+        assert signalName in self, f"{signalName} is not a registered signal"
         self[signalName].emit(*args, **kwargs)

     def connect(self, signalName, slot):
@@ -208,7 +208,7 @@ class SignalFactory(dict):
         :param signalName: the signal name to connect to
         :param slot: the callable slot to register
         """
-        assert signalName in self, "%s is not a registered signal" % signalName
+        assert signalName in self, f"{signalName} is not a registered signal"
         self[signalName].connect(slot)

     def block(self, signals=None, isBlocked=True):
@@ -230,7 +230,7 @@ class SignalFactory(dict):

         for signal in signals:
             if signal not in self:
-                raise RuntimeError("Could not find signal matching %s" % signal)
+                raise RuntimeError(f"Could not find signal matching {signal}")
             self[signal].block(isBlocked)

@@ -279,7 +279,7 @@ class QObjectListModel(QtCore.QAbstractListModel):
         if key is None:
             return
         if key in self._objectByKey:
-            raise ValueError("Object key {}:{} is not unique".format(self._keyAttrName, key))
+            raise ValueError(f"Object key {self._keyAttrName}:{key} is not unique")

         self._objectByKey[key] = item

@@ -72,10 +72,10 @@ def loadPlugins(folder, packageName, classType):
        try:
            pluginMod = importlib.import_module(pluginModuleName, package=package.__name__)
            plugins = [plugin for name, plugin in inspect.getmembers(pluginMod, inspect.isclass)
-                      if plugin.__module__ == '{}.{}'.format(package.__name__, pluginName)
+                      if plugin.__module__ == f'{package.__name__}.{pluginName}'
                       and issubclass(plugin, classType)]
            if not plugins:
-                logging.warning("No class defined in plugin: {}".format(pluginModuleName))
+                logging.warning(f"No class defined in plugin: {pluginModuleName}")

            importPlugin = True
            for p in plugins:
@@ -91,7 +91,7 @@ def loadPlugins(folder, packageName, classType):
            if importPlugin:
                pluginTypes.extend(plugins)
        except Exception as e:
-            errors.append(' * {}: {}'.format(pluginName, str(e)))
+            errors.append(f' * {pluginName}: {str(e)}')

    if errors:
        logging.warning('== The following "{package}" plugins could not be loaded ==\n'
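
A detail worth noting in the hunk above: flynt's rewrite is purely syntactic, so an explicit `str()` call such as the one in `f' * {pluginName}: {str(e)}'` is kept even though an f-string with no format spec already converts the value via `str()`. A small sketch of that equivalence (the exception value is made up):

    # With an empty format spec, format(obj) falls back to str(obj),
    # so the str() kept by flynt is redundant but harmless.
    e = ImportError("plugin import failed")
    assert f"{e}" == f"{str(e)}" == "plugin import failed"
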
@@ -285,7 +285,7 @@ def registerNodeType(nodeType):
    """
    global nodesDesc
    if nodeType.__name__ in nodesDesc:
-        logging.error("Node Desc {} is already registered.".format(nodeType.__name__))
+        logging.error(f"Node Desc {nodeType.__name__} is already registered.")
    nodesDesc[nodeType.__name__] = nodeType

@@ -307,13 +307,13 @@ def loadAllNodes(folder):
        nodeTypes = loadNodes(folder, package)
        for nodeType in nodeTypes:
            registerNodeType(nodeType)
-        logging.debug('Nodes loaded [{}]: {}'.format(package, ', '.join([nodeType.__name__ for nodeType in nodeTypes])))
+        logging.debug(f"Nodes loaded [{package}]: {', '.join([nodeType.__name__ for nodeType in nodeTypes])}")


def registerSubmitter(s):
    global submitters
    if s.name in submitters:
-        logging.error("Submitter {} is already registered.".format(s.name))
+        logging.error(f"Submitter {s.name} is already registered.")
    submitters[s.name] = s

@@ -354,4 +354,4 @@ def initPipelines():
        if os.path.isdir(f):
            loadPipelineTemplates(f)
        else:
-            logging.warning("Pipeline templates folder '{}' does not exist.".format(f))
+            logging.warning(f"Pipeline templates folder '{f}' does not exist.")
@@ -91,19 +91,19 @@ class Attribute(BaseObject):
    def getFullName(self):
        """ Name inside the Graph: groupName.name """
        if isinstance(self.root, ListAttribute):
-            return '{}[{}]'.format(self.root.getFullName(), self.root.index(self))
+            return f'{self.root.getFullName()}[{self.root.index(self)}]'
        elif isinstance(self.root, GroupAttribute):
-            return '{}.{}'.format(self.root.getFullName(), self.getName())
+            return f'{self.root.getFullName()}.{self.getName()}'
        return self.getName()

    def getFullNameToNode(self):
        """ Name inside the Graph: nodeName.groupName.name """
-        return '{}.{}'.format(self.node.name, self.getFullName())
+        return f'{self.node.name}.{self.getFullName()}'

    def getFullNameToGraph(self):
        """ Name inside the Graph: graphName.nodeName.groupName.name """
        graphName = self.node.graph.name if self.node.graph else "UNDEFINED"
-        return '{}.{}'.format(graphName, self.getFullNameToNode())
+        return f'{graphName}.{self.getFullNameToNode()}'

    def asLinkExpr(self):
        """ Return link expression for this Attribute """
@@ -130,17 +130,17 @@ class Attribute(BaseObject):
        if isinstance(self.root, ListAttribute):
            return self.root.getFullLabel()
        elif isinstance(self.root, GroupAttribute):
-            return '{} {}'.format(self.root.getFullLabel(), self.getLabel())
+            return f'{self.root.getFullLabel()} {self.getLabel()}'
        return self.getLabel()

    def getFullLabelToNode(self):
        """ Label inside the Graph: nodeLabel groupLabel Label """
-        return '{} {}'.format(self.node.label, self.getFullLabel())
+        return f'{self.node.label} {self.getFullLabel()}'

    def getFullLabelToGraph(self):
        """ Label inside the Graph: graphName nodeLabel groupLabel Label """
        graphName = self.node.graph.name if self.node.graph else "UNDEFINED"
-        return '{} {}'.format(graphName, self.getFullLabelToNode())
+        return f'{graphName} {self.getFullLabelToNode()}'

    def getEnabled(self):
        if isinstance(self.desc.enabled, types.FunctionType):
@@ -350,7 +350,7 @@ class Attribute(BaseObject):
            g.addEdge(node.attribute(linkAttrName), self)
        except KeyError as err:
            logging.warning('Connect Attribute from Expression failed.')
-            logging.warning('Expression: "{exp}"\nError: "{err}".'.format(exp=v, err=err))
+            logging.warning(f'Expression: "{v}"\nError: "{err}".')
            self.resetToDefaultValue()

    def getExportValue(self):
@@ -389,11 +389,11 @@ class Attribute(BaseObject):
            assert (isinstance(self.value, Sequence) and not isinstance(self.value, str))
            v = self.attributeDesc.joinChar.join(self.getEvalValue())
            if withQuotes and v:
-                return '"{}"'.format(v)
+                return f'"{v}"'
            return v
        # String, File, single value Choice are based on strings and should includes quotes to deal with spaces
        if withQuotes and isinstance(self.attributeDesc, (desc.StringParam, desc.File, desc.ChoiceParam)):
-            return '"{}"'.format(self.getEvalValue())
+            return f'"{self.getEvalValue()}"'
        return str(self.getEvalValue())

    def defaultValue(self):
@@ -685,7 +685,7 @@ class ListAttribute(Attribute):
        else:
            v = self.attributeDesc.joinChar.join([v.getValueStr(withQuotes=False) for v in self.value])
            if withQuotes and v:
-                return '"{}"'.format(v)
+                return f'"{v}"'
            return v

    def updateInternals(self):
@@ -730,11 +730,11 @@ class GroupAttribute(Attribute):
                self._value.get(key).value = v
        elif isinstance(value, (list, tuple)):
            if len(self.desc._groupDesc) != len(value):
-                raise AttributeError("Incorrect number of values on GroupAttribute: {}".format(str(value)))
+                raise AttributeError(f"Incorrect number of values on GroupAttribute: {str(value)}")
            for attrDesc, v in zip(self.desc._groupDesc, value):
                self._value.get(attrDesc.name).value = v
        else:
-            raise AttributeError("Failed to set on GroupAttribute: {}".format(str(value)))
+            raise AttributeError(f"Failed to set on GroupAttribute: {str(value)}")

    def upgradeValue(self, exportedValue):
        value = self.validateValue(exportedValue)
@@ -745,11 +745,11 @@ class GroupAttribute(Attribute):
                self._value.get(key).upgradeValue(v)
        elif isinstance(value, (list, tuple)):
            if len(self.desc._groupDesc) != len(value):
-                raise AttributeError("Incorrect number of values on GroupAttribute: {}".format(str(value)))
+                raise AttributeError(f"Incorrect number of values on GroupAttribute: {str(value)}")
            for attrDesc, v in zip(self.desc._groupDesc, value):
                self._value.get(attrDesc.name).upgradeValue(v)
        else:
-            raise AttributeError("Failed to set on GroupAttribute: {}".format(str(value)))
+            raise AttributeError(f"Failed to set on GroupAttribute: {str(value)}")

    def initValue(self):
        self._value = DictModel(keyAttrName='name', parent=self)
@@ -816,7 +816,7 @@ class GroupAttribute(Attribute):
            strBegin = self.attributeDesc.brackets[0]
            strEnd = self.attributeDesc.brackets[1]
        else:
-            raise AttributeError("Incorrect brackets on GroupAttribute: {}".format(self.attributeDesc.brackets))
+            raise AttributeError(f"Incorrect brackets on GroupAttribute: {self.attributeDesc.brackets}")

        # particular case when using space separator
        spaceSep = self.attributeDesc.joinChar == ' '
@@ -827,8 +827,8 @@ class GroupAttribute(Attribute):
        s = self.attributeDesc.joinChar.join(sortedSubValues)

        if withQuotes and not spaceSep:
-            return '"{}{}{}"'.format(strBegin, s, strEnd)
-        return '{}{}{}'.format(strBegin, s, strEnd)
+            return f'"{strBegin}{s}{strEnd}"'
+        return f'{strBegin}{s}{strEnd}'

    def updateInternals(self):
        super(GroupAttribute, self).updateInternals()
@@ -113,10 +113,10 @@ class Node(object):
        pass

    def stopProcess(self, chunk):
-        raise NotImplementedError('No stopProcess implementation on node: {}'.format(chunk.node.name))
+        raise NotImplementedError(f'No stopProcess implementation on node: {chunk.node.name}')

    def processChunk(self, chunk):
-        raise NotImplementedError('No processChunk implementation on node: "{}"'.format(chunk.node.name))
+        raise NotImplementedError(f'No processChunk implementation on node: "{chunk.node.name}"')


class InputNode(Node):
@@ -146,7 +146,7 @@ class CommandLineNode(Node):
        # If rez available in env, we use it
        if "REZ_ENV" in os.environ and chunk.node.packageVersion:
            # If the node package is already in the environment, we don't need a new dedicated rez environment
-            alreadyInEnv = os.environ.get("REZ_{}_VERSION".format(chunk.node.packageName.upper()),
+            alreadyInEnv = os.environ.get(f"REZ_{chunk.node.packageName.upper()}_VERSION",
                                          "").startswith(chunk.node.packageVersion)
            if not alreadyInEnv:
                cmdPrefix = '{rez} {packageFullName} -- '.format(rez=os.environ.get("REZ_ENV"),
@@ -178,8 +178,8 @@ class CommandLineNode(Node):
            cmd = self.buildCommandLine(chunk)
            chunk.status.commandLine = cmd
            chunk.saveStatusFile()
-            print(' - commandLine: {}'.format(cmd))
-            print(' - logFile: {}'.format(chunk.logFile))
+            print(f' - commandLine: {cmd}')
+            print(f' - logFile: {chunk.logFile}')
            chunk.subprocess = psutil.Popen(shlex.split(cmd), stdout=logF, stderr=logF, cwd=chunk.node.internalFolder)

            # Store process static info into the status file
@@ -195,7 +195,7 @@ class CommandLineNode(Node):
            if chunk.subprocess.returncode != 0:
                with open(chunk.logFile, 'r') as logF:
                    logContent = ''.join(logF.readlines())
-                raise RuntimeError('Error on node "{}":\nLog:\n{}'.format(chunk.name, logContent))
+                raise RuntimeError(f'Error on node "{chunk.name}":\nLog:\n{logContent}')
        except Exception:
            raise
        finally:
@@ -217,12 +217,12 @@ class AVCommandLineNode(CommandLineNode):
            AVCommandLineNode.cmdMem = ''
            memSize = cgroup.getCgroupMemorySize()
            if memSize > 0:
-                AVCommandLineNode.cmdMem = ' --maxMemory={memSize}'.format(memSize=memSize)
+                AVCommandLineNode.cmdMem = f' --maxMemory={memSize}'

            AVCommandLineNode.cmdCore = ''
            coresCount = cgroup.getCgroupCpuCount()
            if coresCount > 0:
-                AVCommandLineNode.cmdCore = ' --maxCores={coresCount}'.format(coresCount=coresCount)
+                AVCommandLineNode.cmdCore = f' --maxCores={coresCount}'

            AVCommandLineNode.cgroupParsed = True

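
The `AVCommandLineNode` hunk above shows the keyword-argument form of `str.format()`: flynt drops the named placeholder and puts the bound expression straight into the braces. A minimal sketch with a made-up value:

    memSize = 4096
    old = ' --maxMemory={memSize}'.format(memSize=memSize)
    new = f' --maxMemory={memSize}'  # {memSize} now refers to the local variable directly
    assert old == new
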
@@ -39,9 +39,9 @@ class UnknownNodeTypeError(GraphException):

class NodeUpgradeError(GraphException):
    def __init__(self, nodeName, details=None):
-        msg = "Failed to upgrade node {}".format(nodeName)
+        msg = f"Failed to upgrade node {nodeName}"
        if details:
-            msg += ": {}".format(details)
+            msg += f": {details}"
        super(NodeUpgradeError, self).__init__(msg)

@@ -66,7 +66,7 @@ class Edge(BaseObject):
        super(Edge, self).__init__(parent)
        self._src = weakref.ref(src)
        self._dst = weakref.ref(dst)
-        self._repr = "<Edge> {} -> {}".format(self._src(), self._dst())
+        self._repr = f"<Edge> {self._src()} -> {self._dst()}"

    @property
    def src(self):
@@ -825,12 +825,12 @@ class Graph(BaseObject):
    def findNode(self, nodeExpr):
        candidates = self.findNodeCandidates('^' + nodeExpr)
        if not candidates:
-            raise KeyError('No node candidate for "{}"'.format(nodeExpr))
+            raise KeyError(f'No node candidate for "{nodeExpr}"')
        if len(candidates) > 1:
            for c in candidates:
                if c.name == nodeExpr:
                    return c
-            raise KeyError('Multiple node candidates for "{}": {}'.format(nodeExpr, str([c.name for c in candidates])))
+            raise KeyError(f'Multiple node candidates for "{nodeExpr}": {str([c.name for c in candidates])}')
        return candidates[0]

    def findNodes(self, nodesExpr):
@@ -856,7 +856,7 @@ class Graph(BaseObject):
        if srcAttr.node.graph != self or dstAttr.node.graph != self:
            raise RuntimeError('The attributes of the edge should be part of a common graph.')
        if dstAttr in self.edges.keys():
-            raise RuntimeError('Destination attribute "{}" is already connected.'.format(dstAttr.getFullNameToNode()))
+            raise RuntimeError(f'Destination attribute "{dstAttr.getFullNameToNode()}" is already connected.')
        edge = Edge(srcAttr, dstAttr)
        self.edges.add(edge)
        self.markNodesDirty(dstAttr.node)
@@ -873,7 +873,7 @@ class Graph(BaseObject):
    @changeTopology
    def removeEdge(self, dstAttr):
        if dstAttr not in self.edges.keys():
-            raise RuntimeError('Attribute "{}" is not connected'.format(dstAttr.getFullNameToNode()))
+            raise RuntimeError(f'Attribute "{dstAttr.getFullNameToNode()}" is not connected')
        edge = self.edges.pop(dstAttr)
        self.markNodesDirty(dstAttr.node)
        dstAttr.valueChanged.emit()
@@ -1608,12 +1608,11 @@ def executeGraph(graph, toNodes=None, forceCompute=False, forceStatus=False):
                    node=n+1, nbNodes=len(nodes),
                    chunk=c+1, nbChunks=len(node.chunks), nodeName=node.nodeType))
            else:
-                print('\n[{node}/{nbNodes}] {nodeName}'.format(
-                    node=n + 1, nbNodes=len(nodes), nodeName=node.nodeType))
+                print(f'\n[{n + 1}/{len(nodes)}] {node.nodeType}')
            chunk.process(forceCompute)
        node.postprocess()
    except Exception as e:
-        logging.error("Error on node computation: {}".format(e))
+        logging.error(f"Error on node computation: {e}")
        graph.clearSubmittedNodes()
        raise
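
The `executeGraph` hunk above is the one place where the rewrite is not line-for-line: a `print()` whose `.format()` call spanned two physical lines collapses into a single f-string, which appears to account for the one-line gap in the summary (141 additions vs. 142 deletions). A sketch of the same shape, with illustrative loop variables:

    nodes = ["CameraInit", "FeatureExtraction"]
    for n, node in enumerate(nodes):
        # before: two physical lines
        print('\n[{node}/{nbNodes}] {nodeName}'.format(
            node=n + 1, nbNodes=len(nodes), nodeName=node))
        # after: one line, same output
        print(f'\n[{n + 1}/{len(nodes)}] {node}')
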
@@ -1630,8 +1629,8 @@ def submitGraph(graph, submitter, toNodes=None, submitLabel="{projectName}"):
        logging.warning('Nothing to compute')
        return

-    logging.info("Nodes to process: {}".format(edgesToProcess))
-    logging.info("Edges to process: {}".format(edgesToProcess))
+    logging.info(f"Nodes to process: {edgesToProcess}")
+    logging.info(f"Edges to process: {edgesToProcess}")

    sub = None
    if submitter:
@@ -1649,7 +1648,7 @@ def submitGraph(graph, submitter, toNodes=None, submitLabel="{projectName}"):
        for node in nodesToProcess:
            node.submit()  # update node status
    except Exception as e:
-        logging.error("Error on submit : {}".format(e))
+        logging.error(f"Error on submit : {e}")


def submit(graphFile, submitter, toNode=None, submitLabel="{projectName}"):
@@ -268,7 +268,7 @@ class NodeChunk(BaseObject):
    @property
    def name(self):
        if self.range.blockSize:
-            return "{}({})".format(self.node.name, self.index)
+            return f"{self.node.name}({self.index})"
        else:
            return self.node.name

@@ -403,7 +403,7 @@ class NodeChunk(BaseObject):

    def process(self, forceCompute=False):
        if not forceCompute and self._status.status == Status.SUCCESS:
-            logging.info("Node chunk already computed: {}".format(self.name))
+            logging.info(f"Node chunk already computed: {self.name}")
            return
        global runningProcesses
        runningProcesses[self.name] = self
@@ -427,7 +427,7 @@ class NodeChunk(BaseObject):
            self._status.elapsedTime = time.time() - startTime
            if exceptionStatus is not None:
                self.upgradeStatusTo(exceptionStatus)
-            logging.info(" - elapsed time: {}".format(self._status.elapsedTimeStr))
+            logging.info(f" - elapsed time: {self._status.elapsedTimeStr}")
            # Ask and wait for the stats thread to stop
            self.statThread.stopRequest()
            self.statThread.join()
@@ -582,7 +582,7 @@ class BaseNode(BaseObject):
            str: the high-level label from the technical node name
        """
        t, idx = name.split("_")
-        return "{}{}".format(t, idx if int(idx) > 1 else "")
+        return f"{t}{idx if int(idx) > 1 else ''}"

    def getDocumentation(self):
        if not self.nodeDesc:
@@ -739,7 +739,7 @@ class BaseNode(BaseObject):
            if group is not None:
                # If there is a valid command line "group"
                v = attr.getValueStr(withQuotes=True)
-                cmdVars[name] = "--{name} {value}".format(name=name, value=v)
+                cmdVars[name] = f"--{name} {v}"
                # xxValue is exposed without quotes to allow to compose expressions
                cmdVars[name + "Value"] = attr.getValueStr(withQuotes=False)

@@ -810,7 +810,7 @@ class BaseNode(BaseObject):

                v = attr.getValueStr(withQuotes=True)

-                self._cmdVars[name] = '--{name} {value}'.format(name=name, value=v)
+                self._cmdVars[name] = f'--{name} {v}'
                # xxValue is exposed without quotes to allow to compose expressions
                self._cmdVars[name + 'Value'] = attr.getValueStr(withQuotes=False)

@@ -1117,7 +1117,7 @@ class BaseNode(BaseObject):
            return
        valuesFile = self.valuesFile
        if not os.path.exists(valuesFile):
-            logging.warning("No output attr file: {}".format(valuesFile))
+            logging.warning(f"No output attr file: {valuesFile}")
            return

        # logging.warning("load output attr: {}, value: {}".format(self.name, valuesFile))
@@ -1564,7 +1564,7 @@ class Node(BaseNode):
                    chunk.range = range
            except RuntimeError:
                # TODO: set node internal status to error
-                logging.warning("Invalid Parallelization on node {}".format(self._name))
+                logging.warning(f"Invalid Parallelization on node {self._name}")
                self._chunks.clear()
        else:
            if len(self._chunks) != 1:
@@ -1758,7 +1758,7 @@ class CompatibilityNode(BaseNode):
    @property
    def issueDetails(self):
        if self.issue == CompatibilityIssue.UnknownNodeType:
-            return "Unknown node type: '{}'.".format(self.nodeType)
+            return f"Unknown node type: '{self.nodeType}'."
        elif self.issue == CompatibilityIssue.VersionConflict:
            return "Node version '{}' conflicts with current version '{}'.".format(
                self.nodeDict["version"], nodeVersion(self.nodeDesc)
@@ -1834,7 +1834,7 @@ class CompatibilityNode(BaseNode):
        try:
            upgradedAttrValues = node.nodeDesc.upgradeAttributeValues(attrValues, self.version)
        except Exception as e:
-            logging.error("Error in the upgrade implementation of the node: {}.\n{}".format(self.name, repr(e)))
+            logging.error(f"Error in the upgrade implementation of the node: {self.name}.\n{repr(e)}")
            upgradedAttrValues = attrValues

        if not isinstance(upgradedAttrValues, dict):
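
Several of the hunks in this file move complete conditional expressions inside the replacement field (e.g. `f"{t}{idx if int(idx) > 1 else ''}"` in `BaseNode`); any expression is allowed between the braces, so the rewrite stays a one-liner. A self-contained sketch of that label helper's shape (the standalone function name is hypothetical):

    def name_to_label(name):
        # Drop the numeric suffix for index 1, keep it otherwise, as in the BaseNode hunk above.
        t, idx = name.split("_")
        return f"{t}{idx if int(idx) > 1 else ''}"

    assert name_to_label("CameraInit_1") == "CameraInit"
    assert name_to_label("FeatureExtraction_2") == "FeatureExtraction2"
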
@@ -24,8 +24,8 @@ def bytes2human(n):
    for s in reversed(symbols):
        if n >= prefix[s]:
            value = float(n) / prefix[s]
-            return '%.2f %s' % (value, s)
-    return '%.2f B' % (n)
+            return f'{value:.2f} {s}'
+    return f'{n:.2f} B'


class ComputerStatistics:
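
The `bytes2human` hunk above also shows that printf conversion specifiers carry over as format specs after the colon: `'%.2f %s' % (value, s)` becomes `f'{value:.2f} {s}'`. A tiny check with made-up numbers:

    value = 1536 / 1024.0
    assert '%.2f %s' % (value, 'K') == f'{value:.2f} K' == '1.50 K'
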
@@ -57,7 +57,7 @@ class ComputerStatistics:
        if self.nvidia_smi is None:
            # Could not be found from the environment path,
            # try to find it from system drive with default installation path
-            default_nvidia_smi = "%s\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe" % os.environ['systemdrive']
+            default_nvidia_smi = f"{os.environ['systemdrive']}\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe"
            if os.path.isfile(default_nvidia_smi):
                self.nvidia_smi = default_nvidia_smi
            else:
@@ -84,7 +84,7 @@ class ComputerStatistics:
            self._addKV('ioCounters', psutil.disk_io_counters())
            self.updateGpu()
        except Exception as e:
-            logging.debug('Failed to get statistics: "{}".'.format(str(e)))
+            logging.debug(f'Failed to get statistics: "{str(e)}".')

    def updateGpu(self):
        if not self.nvidia_smi:
@@ -99,38 +99,38 @@ class ComputerStatistics:
            try:
                self.gpuName = gpuTree.find('product_name').text
            except Exception as e:
-                logging.debug('Failed to get gpuName: "{}".'.format(str(e)))
+                logging.debug(f'Failed to get gpuName: "{str(e)}".')
                pass
            try:
                gpuMemoryUsed = gpuTree.find('fb_memory_usage').find('used').text.split(" ")[0]
                self._addKV('gpuMemoryUsed', gpuMemoryUsed)
            except Exception as e:
-                logging.debug('Failed to get gpuMemoryUsed: "{}".'.format(str(e)))
+                logging.debug(f'Failed to get gpuMemoryUsed: "{str(e)}".')
                pass
            try:
                self.gpuMemoryTotal = gpuTree.find('fb_memory_usage').find('total').text.split(" ")[0]
            except Exception as e:
-                logging.debug('Failed to get gpuMemoryTotal: "{}".'.format(str(e)))
+                logging.debug(f'Failed to get gpuMemoryTotal: "{str(e)}".')
                pass
            try:
                gpuUsed = gpuTree.find('utilization').find('gpu_util').text.split(" ")[0]
                self._addKV('gpuUsed', gpuUsed)
            except Exception as e:
-                logging.debug('Failed to get gpuUsed: "{}".'.format(str(e)))
+                logging.debug(f'Failed to get gpuUsed: "{str(e)}".')
                pass
            try:
                gpuTemperature = gpuTree.find('temperature').find('gpu_temp').text.split(" ")[0]
                self._addKV('gpuTemperature', gpuTemperature)
            except Exception as e:
-                logging.debug('Failed to get gpuTemperature: "{}".'.format(str(e)))
+                logging.debug(f'Failed to get gpuTemperature: "{str(e)}".')
                pass
        except subprocess.TimeoutExpired as e:
-            logging.debug('Timeout when retrieving information from nvidia_smi: "{}".'.format(str(e)))
+            logging.debug(f'Timeout when retrieving information from nvidia_smi: "{str(e)}".')
            p.kill()
            outs, errs = p.communicate()
            return
        except Exception as e:
-            logging.debug('Failed to get information from nvidia_smi: "{}".'.format(str(e)))
+            logging.debug(f'Failed to get information from nvidia_smi: "{str(e)}".')
            return

    def toDict(self):
@@ -263,22 +263,22 @@ class Statistics:
    def fromDict(self, d):
        version = d.get('fileVersion', 0.0)
        if version != self.fileVersion:
-            logging.debug('Statistics: file version was {} and the current version is {}.'.format(version, self.fileVersion))
+            logging.debug(f'Statistics: file version was {version} and the current version is {self.fileVersion}.')
        self.computer = ComputerStatistics()
        self.process = ProcStatistics()
        self.times = []
        try:
            self.computer.fromDict(d.get('computer', {}))
        except Exception as e:
-            logging.debug('Failed while loading statistics: computer: "{}".'.format(str(e)))
+            logging.debug(f'Failed while loading statistics: computer: "{str(e)}".')
        try:
            self.process.fromDict(d.get('process', {}))
        except Exception as e:
-            logging.debug('Failed while loading statistics: process: "{}".'.format(str(e)))
+            logging.debug(f'Failed while loading statistics: process: "{str(e)}".')
        try:
            self.times = d.get('times', [])
        except Exception as e:
-            logging.debug('Failed while loading statistics: times: "{}".'.format(str(e)))
+            logging.debug(f'Failed while loading statistics: times: "{str(e)}".')


bytesPerGiga = 1024. * 1024. * 1024.
@@ -69,7 +69,7 @@ class TaskThread(Thread):
                        stopAndRestart = True
                        break
                    else:
-                        logging.error("Error on node computation: {}".format(e))
+                        logging.error(f"Error on node computation: {e}")
                        nodesToRemove, _ = self._manager._graph.dfsOnDiscover(startNodes=[node], reverse=True)
                        # remove following nodes from the task queue
                        for n in nodesToRemove[1:]:  # exclude current node
@@ -425,8 +425,8 @@ class TaskManager(BaseObject):
            flowEdges = graph.flowEdges(startNodes=toNodes)
            edgesToProcess = set(edgesToProcess).intersection(flowEdges)

-        logging.info("Nodes to process: {}".format(nodesToProcess))
-        logging.info("Edges to process: {}".format(edgesToProcess))
+        logging.info(f"Nodes to process: {nodesToProcess}")
+        logging.info(f"Edges to process: {edgesToProcess}")

        try:
            res = sub.submit(nodesToProcess, edgesToProcess, graph.filepath, submitLabel=submitLabel)
@@ -441,7 +441,7 @@ class TaskManager(BaseObject):
            if not allReady:
                self.raiseDependenciesMessage("SUBMITTING")
        except Exception as e:
-            logging.error("Error on submit : {}".format(e))
+            logging.error(f"Error on submit : {e}")

    def submitFromFile(self, graphFile, submitter, toNode=None, submitLabel="{projectName}"):
        """
@@ -54,7 +54,7 @@ def checkTemplateVersions(path: str, nodesAlreadyLoaded: bool = False) -> bool:
            break

    if compatibilityIssue is not None:
-        print("{} in {} for node {}".format(compatibilityIssue, path, nodeType))
+        print(f"{compatibilityIssue} in {path} for node {nodeType}")
        return False

    return True
@@ -161,7 +161,7 @@ def mvsPipeline(graph, sfm=None):
        list of Node: the created nodes
    """
    if sfm and not sfm.nodeType == "StructureFromMotion":
-        raise ValueError("Invalid node type. Expected StructureFromMotion, got {}.".format(sfm.nodeType))
+        raise ValueError(f"Invalid node type. Expected StructureFromMotion, got {sfm.nodeType}.")

    prepareDenseScene = graph.addNewNode('PrepareDenseScene',
                                         input=sfm.output if sfm else "")
@@ -71,7 +71,7 @@ This node allows to copy files into a specific folder.
            if not outFiles:
                error = 'Publish: input files listed, but nothing to publish'
                chunk.logger.error(error)
-                chunk.logger.info('Listed input files: {}'.format([i.value for i in chunk.node.inputFiles.value]))
+                chunk.logger.info(f'Listed input files: {[i.value for i in chunk.node.inputFiles.value]}')
                raise RuntimeError(error)

            if not os.path.exists(chunk.node.output.value):
@@ -79,10 +79,10 @@ This node allows to copy files into a specific folder.

            for iFile, oFile in outFiles.items():
                if os.path.isdir(iFile):  # If the input is a directory, copy the directory's content
-                    chunk.logger.info('Publish directory {} into {}'.format(iFile, oFile))
+                    chunk.logger.info(f'Publish directory {iFile} into {oFile}')
                    du.copy_tree(iFile, oFile)
                else:
-                    chunk.logger.info('Publish file {} into {}'.format(iFile, oFile))
+                    chunk.logger.info(f'Publish file {iFile} into {oFile}')
                    shutil.copyfile(iFile, oFile)
            chunk.logger.info('Publish end')
        finally:
@@ -45,7 +45,7 @@ class RippleSubmitter(BaseSubmitter):
        }

        #Specify some constraints
-        requirements = "!\"rs*\",@.mem>25{gpu}".format(gpu=gpudict[node.nodeDesc.gpu.name])
+        requirements = f"!\"rs*\",@.mem>25{gpudict[node.nodeDesc.gpu.name]}"

        #decide if we need multiple slots
        minProcessors = 1
@@ -67,7 +67,7 @@ class RippleSubmitter(BaseSubmitter):
            waitsFor.append(parent.name)

        #Basic command line for this node
-        command='meshroom_compute --node {nodeName} "{meshroomFile}" --extern'.format(nodeName=node.name, meshroomFile=meshroomFile)
+        command=f'meshroom_compute --node {node.name} "{meshroomFile}" --extern'

        if node.isParallelized:
            _, _, nbBlocks = node.nodeDesc.parallelization.getSizes(node)
@@ -77,13 +77,13 @@ class RippleSubmitter(BaseSubmitter):
            for iteration in range(0, nbBlocks):

                #Add iteration number
-                commandext = '{cmd} --iteration {iter}'.format(cmd=command, iter=iteration)
+                commandext = f'{command} --iteration {iteration}'

                #Create process task with parameters
-                rippleproc = RippleProcessWithSlots(name='{name} iteration {iter}'.format(name=node.name, iter=iteration), discipline='ripple', appendKeys=True, keys=requirements, label=node.name, cmdList=[commandext], waitsFor=waitsFor, minProcessors=minProcessors, maxProcessors=maxProcessors)
+                rippleproc = RippleProcessWithSlots(name=f'{node.name} iteration {iteration}', discipline='ripple', appendKeys=True, keys=requirements, label=node.name, cmdList=[commandext], waitsFor=waitsFor, minProcessors=minProcessors, maxProcessors=maxProcessors)
                rippleprocs.append(rippleproc)

-            rippleObj = RippleGroup(label="{name} Group".format(name=node.name), tasks=rippleprocs, name=node.name, waitsFor=waitsFor)
+            rippleObj = RippleGroup(label=f"{node.name} Group", tasks=rippleprocs, name=node.name, waitsFor=waitsFor)
        else:
            rippleObj = RippleProcessWithSlots(name=node.name, discipline='ripple', appendKeys=True, keys=requirements, label=node.name, cmdList=[command], waitsFor=waitsFor, minProcessors=minProcessors, maxProcessors=maxProcessors)

@@ -46,9 +46,9 @@ class SimpleFarmSubmitter(BaseSubmitter):
                    continue
                v = p.split('-')
                self.reqPackages.append('-'.join([v[0], resolvedVersions[v[0]]]))
-            logging.debug('REZ Packages: {}'.format(str(self.reqPackages)))
+            logging.debug(f'REZ Packages: {str(self.reqPackages)}')
        elif 'REZ_MESHROOM_VERSION' in os.environ:
-            self.reqPackages = ["meshroom-{}".format(os.environ.get('REZ_MESHROOM_VERSION', ''))]
+            self.reqPackages = [f"meshroom-{os.environ.get('REZ_MESHROOM_VERSION', '')}"]
        else:
            self.reqPackages = None

@@ -379,13 +379,13 @@ class MeshroomApp(QApplication):
            return viewpoints[0].get("path", "")

        except FileNotFoundError:
-            logging.info("File {} not found on disk.".format(filepath))
+            logging.info(f"File {filepath} not found on disk.")
        except (json.JSONDecodeError, UnicodeDecodeError):
-            logging.info("Error while loading file {}.".format(filepath))
+            logging.info(f"Error while loading file {filepath}.")
        except KeyError as err:
-            logging.info("The following key does not exist: {}".format(str(err)))
+            logging.info(f"The following key does not exist: {str(err)}")
        except Exception as err:
-            logging.info("Exception: {}".format(str(err)))
+            logging.info(f"Exception: {str(err)}")

        return ""

@@ -443,7 +443,7 @@ class MeshroomApp(QApplication):
            projectFile (str or QUrl): path to the project file to add to the list
        """
        if not isinstance(projectFile, (QUrl, str)):
-            raise TypeError("Unexpected data type: {}".format(projectFile.__class__))
+            raise TypeError(f"Unexpected data type: {projectFile.__class__}")
        if isinstance(projectFile, QUrl):
            projectFileNorm = projectFile.toLocalFile()
            if not projectFileNorm:
@@ -495,7 +495,7 @@ class MeshroomApp(QApplication):
        Otherwise, it is effectively removed and the QSettings are updated accordingly.
        """
        if not isinstance(projectFile, (QUrl, str)):
-            raise TypeError("Unexpected data type: {}".format(projectFile.__class__))
+            raise TypeError(f"Unexpected data type: {projectFile.__class__}")
        if isinstance(projectFile, QUrl):
            projectFileNorm = projectFile.toLocalFile()
            if not projectFileNorm:
@@ -551,7 +551,7 @@ class MeshroomApp(QApplication):
            if not folderPath:
                folderPath = imagesFolder.toString()
        else:
-            raise TypeError("Unexpected data type: {}".format(imagesFolder.__class__))
+            raise TypeError(f"Unexpected data type: {imagesFolder.__class__}")

        folders = self._recentImportedImagesFolders()

@@ -586,7 +586,7 @@ class MeshroomApp(QApplication):
            if not folderPath:
                folderPath = imagesFolder.toString()
        else:
-            raise TypeError("Unexpected data type: {}".format(imagesFolder.__class__))
+            raise TypeError(f"Unexpected data type: {imagesFolder.__class__}")

        folders = self._recentImportedImagesFolders()

@@ -633,9 +633,9 @@ class MeshroomApp(QApplication):
        import platform
        import sys
        return {
-            'platform': '{} {}'.format(platform.system(), platform.release()),
-            'python': 'Python {}'.format(sys.version.split(" ")[0]),
-            'pyside': 'PySide6 {}'.format(PySideVersion)
+            'platform': f'{platform.system()} {platform.release()}',
+            'python': f"Python {sys.version.split(' ')[0]}",
+            'pyside': f'PySide6 {PySideVersion}'
        }

    systemInfo = Property(QJsonValue, _systemInfo, constant=True)
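
In the `_systemInfo` hunk above, the converted line also flips the inner quotes (`sys.version.split(" ")[0]` becomes `sys.version.split(' ')[0]`). That is required, not cosmetic: before Python 3.12 (PEP 701), an f-string expression cannot contain the quote character that delimits the string itself. A sketch of the constraint:

    import sys

    # f"Python {sys.version.split(" ")[0]}"  # SyntaxError on Python < 3.12: delimiter quote reused inside braces
    python_version = f"Python {sys.version.split(' ')[0]}"  # inner quotes switched instead
    print(python_version)
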
@@ -26,7 +26,7 @@ class UndoCommand(QUndoCommand):
        try:
            self.redoImpl()
        except Exception:
-            logging.error("Error while redoing command '{}': \n{}".format(self.text(), traceback.format_exc()))
+            logging.error(f"Error while redoing command '{self.text()}': \n{traceback.format_exc()}")

    def undo(self):
        if not self._enabled:
@@ -34,7 +34,7 @@ class UndoCommand(QUndoCommand):
        try:
            self.undoImpl()
        except Exception:
-            logging.error("Error while undoing command '{}': \n{}".format(self.text(), traceback.format_exc()))
+            logging.error(f"Error while undoing command '{self.text()}': \n{traceback.format_exc()}")

    def redoImpl(self):
        # type: () -> bool
@@ -64,7 +64,7 @@ class UndoStack(QUndoStack):
        try:
            res = command.redoImpl()
        except Exception as e:
-            logging.error("Error while trying command '{}': \n{}".format(command.text(), traceback.format_exc()))
+            logging.error(f"Error while trying command '{command.text()}': \n{traceback.format_exc()}")
            res = False
        if res is not False:
            command.setEnabled(False)
@@ -144,7 +144,7 @@ class AddNodeCommand(GraphCommand):
    def redoImpl(self):
        node = self.graph.addNewNode(self.nodeType, position=self.position, **self.kwargs)
        self.nodeName = node.name
-        self.setText("Add Node {}".format(self.nodeName))
+        self.setText(f"Add Node {self.nodeName}")
        return node

    def undoImpl(self):
@@ -156,7 +156,7 @@ class RemoveNodeCommand(GraphCommand):
        super(RemoveNodeCommand, self).__init__(graph, parent)
        self.nodeDict = node.toDict()
        self.nodeName = node.getName()
-        self.setText("Remove Node {}".format(self.nodeName))
+        self.setText(f"Remove Node {self.nodeName}")
        self.outEdges = {}
        self.outListAttributes = {}  # maps attribute's key with a tuple containing the name of the list it is connected to and its value

@@ -221,7 +221,7 @@ class PasteNodesCommand(GraphCommand):
        nodes = self.graph.importGraphContent(graph)

        self.nodeNames = [node.name for node in nodes]
-        self.setText("Paste Node{} ({})".format("s" if len(self.nodeNames) > 1 else "", ", ".join(self.nodeNames)))
+        self.setText(f"Paste Node{'s' if len(self.nodeNames) > 1 else ''} ({', '.join(self.nodeNames)})")
        return nodes

    def undoImpl(self):
@@ -289,7 +289,7 @@ class SetAttributeCommand(GraphCommand):
        self.attrName = attribute.getFullNameToNode()
        self.value = value
        self.oldValue = attribute.getExportValue()
-        self.setText("Set Attribute '{}'".format(attribute.getFullNameToNode()))
+        self.setText(f"Set Attribute '{attribute.getFullNameToNode()}'")

    def redoImpl(self):
        if self.value == self.oldValue:
@@ -312,10 +312,10 @@ class AddEdgeCommand(GraphCommand):
        super(AddEdgeCommand, self).__init__(graph, parent)
        self.srcAttr = src.getFullNameToNode()
        self.dstAttr = dst.getFullNameToNode()
-        self.setText("Connect '{}'->'{}'".format(self.srcAttr, self.dstAttr))
+        self.setText(f"Connect '{self.srcAttr}'->'{self.dstAttr}'")

        if src.baseType != dst.baseType:
-            raise ValueError("Attribute types are not compatible and cannot be connected: '{}'({})->'{}'({})".format(self.srcAttr, src.baseType, self.dstAttr, dst.baseType))
+            raise ValueError(f"Attribute types are not compatible and cannot be connected: '{self.srcAttr}'({src.baseType})->'{self.dstAttr}'({dst.baseType})")

    def redoImpl(self):
        self.graph.addEdge(self.graph.attribute(self.srcAttr), self.graph.attribute(self.dstAttr))
@@ -330,7 +330,7 @@ class RemoveEdgeCommand(GraphCommand):
        super(RemoveEdgeCommand, self).__init__(graph, parent)
        self.srcAttr = edge.src.getFullNameToNode()
        self.dstAttr = edge.dst.getFullNameToNode()
-        self.setText("Disconnect '{}'->'{}'".format(self.srcAttr, self.dstAttr))
+        self.setText(f"Disconnect '{self.srcAttr}'->'{self.dstAttr}'")

    def redoImpl(self):
        self.graph.removeEdge(self.graph.attribute(self.dstAttr))
@@ -349,7 +349,7 @@ class ListAttributeAppendCommand(GraphCommand):
        self.index = None
        self.count = 1
        self.value = value if value else None
-        self.setText("Append to {}".format(self.attrName))
+        self.setText(f"Append to {self.attrName}")

    def redoImpl(self):
        listAttribute = self.graph.attribute(self.attrName)
@@ -374,7 +374,7 @@ class ListAttributeRemoveCommand(GraphCommand):
        self.listAttrName = listAttribute.getFullNameToNode()
        self.index = listAttribute.index(attribute)
        self.value = attribute.getExportValue()
-        self.setText("Remove {}".format(attribute.getFullNameToNode()))
+        self.setText(f"Remove {attribute.getFullNameToNode()}")

    def redoImpl(self):
        listAttribute = self.graph.attribute(self.listAttrName)
@@ -392,7 +392,7 @@ class RemoveImagesCommand(GraphCommand):
        self.cameraInits = cameraInitNodes
        self.viewpoints = { cameraInit.name: cameraInit.attribute("viewpoints").getExportValue() for cameraInit in self.cameraInits }
        self.intrinsics = { cameraInit.name: cameraInit.attribute("intrinsics").getExportValue() for cameraInit in self.cameraInits }
-        self.title = "Remove{}Images".format(" " if len(self.cameraInits) == 1 else " All ")
+        self.title = f"Remove{' ' if len(self.cameraInits) == 1 else ' All '}Images"
        self.setText(self.title)

    def redoImpl(self):
@@ -421,7 +421,7 @@ class MoveNodeCommand(GraphCommand):
        self.nodeName = node.name
        self.oldPosition = node.position
        self.newPosition = position
-        self.setText("Move {}".format(self.nodeName))
+        self.setText(f"Move {self.nodeName}")

    def redoImpl(self):
        self.graph.node(self.nodeName).position = self.newPosition
@@ -440,7 +440,7 @@ class UpgradeNodeCommand(GraphCommand):
        self.nodeDict = node.toDict()
        self.nodeName = node.getName()
        self.compatibilityIssue = None
-        self.setText("Upgrade Node {}".format(self.nodeName))
+        self.setText(f"Upgrade Node {self.nodeName}")

    def redoImpl(self):
        if not (node := self.graph.node(self.nodeName)).canUpgrade:
@@ -77,7 +77,7 @@ class CsvData(QObject):
                for idx, value in enumerate(elt):
                    dataList[idx].appendValue(value)
        except Exception as e:
-            logging.error("CsvData: Failed to load file: {}\n{}".format(self._filepath, str(e)))
+            logging.error(f"CsvData: Failed to load file: {self._filepath}\n{str(e)}")

        return dataList

@@ -28,7 +28,7 @@ class FilepathHelper(QObject):
            str: String representation of 'path'
        """
        if not isinstance(path, (QUrl, str)):
-            raise TypeError("Unexpected data type: {}".format(path.__class__))
+            raise TypeError(f"Unexpected data type: {path.__class__}")
        if isinstance(path, QUrl):
            path = path.toLocalFile()
        return path
@@ -836,14 +836,14 @@ class UIGraph(QObject):
        if isinstance(src, ListAttribute) and not isinstance(dst, ListAttribute):
            self._addEdge(src.at(0), dst)
        elif isinstance(dst, ListAttribute) and not isinstance(src, ListAttribute):
-            with self.groupedGraphModification("Insert and Add Edge on {}".format(dst.getFullNameToNode())):
+            with self.groupedGraphModification(f"Insert and Add Edge on {dst.getFullNameToNode()}"):
                self.appendAttribute(dst)
                self._addEdge(src, dst.at(-1))
        else:
            self._addEdge(src, dst)

    def _addEdge(self, src, dst):
-        with self.groupedGraphModification("Connect '{}'->'{}'".format(src.getFullNameToNode(), dst.getFullNameToNode())):
+        with self.groupedGraphModification(f"Connect '{src.getFullNameToNode()}'->'{dst.getFullNameToNode()}'"):
            if dst in self._graph.edges.keys():
                self.removeEdge(self._graph.edge(dst))
            self.push(commands.AddEdgeCommand(self._graph, src, dst))
@@ -851,7 +851,7 @@ class UIGraph(QObject):
    @Slot(Edge)
    def removeEdge(self, edge):
        if isinstance(edge.dst.root, ListAttribute):
-            with self.groupedGraphModification("Remove Edge and Delete {}".format(edge.dst.getFullNameToNode())):
+            with self.groupedGraphModification(f"Remove Edge and Delete {edge.dst.getFullNameToNode()}"):
                self.push(commands.RemoveEdgeCommand(self._graph, edge))
                self.removeAttribute(edge.dst)
        else:
@@ -859,7 +859,7 @@ class UIGraph(QObject):

    @Slot(Edge, Attribute, Attribute, result=Edge)
    def replaceEdge(self, edge, newSrc, newDst):
-        with self.groupedGraphModification("Replace Edge '{}'->'{}' with '{}'->'{}'".format(edge.src.getFullNameToNode(), edge.dst.getFullNameToNode(), newSrc.getFullNameToNode(), newDst.getFullNameToNode())):
+        with self.groupedGraphModification(f"Replace Edge '{edge.src.getFullNameToNode()}'->'{edge.dst.getFullNameToNode()}' with '{newSrc.getFullNameToNode()}'->'{newDst.getFullNameToNode()}'"):
            self.removeEdge(edge)
            self.addEdge(newSrc, newDst)
            return self._graph.edge(newDst)
@@ -875,7 +875,7 @@ class UIGraph(QObject):
    @Slot(Attribute)
    def resetAttribute(self, attribute):
        """ Reset 'attribute' to its default value """
-        with self.groupedGraphModification("Reset Attribute '{}'".format(attribute.name)):
+        with self.groupedGraphModification(f"Reset Attribute '{attribute.name}'"):
            # if the attribute is a ListAttribute, remove all edges
            if isinstance(attribute, ListAttribute):
                for edge in self._graph.edges:
@@ -54,5 +54,5 @@ with open(os.path.join(args.output, 'MaterialIcons.qml'), 'w') as qml_file:
            name = name + str(index)

        names.append(name)
-        qml_file.write(' readonly property string {}: "\\u{}"\n'.format(name, code))
+        qml_file.write(f' readonly property string {name}: "\\u{code}\"\n')
    qml_file.write('}\n')
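
The converted line above keeps the `\\u` escape and the `\"` in the literal text of the f-string, outside the `{name}` and `{code}` replacement fields. That placement matters: before Python 3.12, backslashes are allowed in the literal part of an f-string but not inside the braces. A sketch of the distinction, with made-up icon data:

    name, code = "home", "e88a"
    line = f'    readonly property string {name}: "\\u{code}"\n'  # backslash lives in the literal part: fine
    # A backslash needed inside the braces would require a workaround such as chr(92) on Python < 3.12.
    print(line, end='')
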
@@ -80,7 +80,7 @@ class LiveSfmManager(QObject):
        """
        # print('[LiveSfmManager] Watching {} for images'.format(folder))
        if not os.path.isdir(folder):
-            raise RuntimeError("Invalid folder provided: {}".format(folder))
+            raise RuntimeError(f"Invalid folder provided: {folder}")
        self._folder = folder
        self.folderChanged.emit()
        self.cameraInit = self.sfm = None
@@ -225,7 +225,7 @@ class ViewpointWrapper(QObject):
            # When the viewpoint attribute has already been deleted, metadata.value becomes a PySide property (whereas a string is expected)
            self._metadata = json.loads(self._viewpoint.metadata.value) if isinstance(self._viewpoint.metadata.value, str) and self._viewpoint.metadata.value else None
        except Exception as e:
-            logging.warning("Failed to parse Viewpoint metadata: '{}', '{}'".format(str(e), str(self._viewpoint.metadata.value)))
+            logging.warning(f"Failed to parse Viewpoint metadata: '{str(e)}', '{str(self._viewpoint.metadata.value)}'")
            self._metadata = {}
        if not self._metadata:
            self._metadata = {}
@@ -567,22 +567,22 @@ class Reconstruction(UIGraph):
            self.error.emit(
                Message(
                    "No Such File",
-                    "Error While Loading '{}': No Such File.".format(os.path.basename(filepath)),
+                    f"Error While Loading '{os.path.basename(filepath)}': No Such File.",
                    ""
                )
            )
-            logging.error("Error while loading '{}': No Such File.".format(filepath))
+            logging.error(f"Error while loading '{filepath}': No Such File.")
        except Exception:
            import traceback
            trace = traceback.format_exc()
            self.error.emit(
                Message(
                    "Error While Loading Project File",
-                    "An unexpected error has occurred while loading file: '{}'".format(os.path.basename(filepath)),
+                    f"An unexpected error has occurred while loading file: '{os.path.basename(filepath)}'",
                    trace
                )
            )
-            logging.error("Error while loading '{}'.".format(filepath))
+            logging.error(f"Error while loading '{filepath}'.")
            logging.error(trace)

        return False
@@ -805,9 +805,9 @@ class Reconstruction(UIGraph):
            keyframeNode = self.addNewNode("KeyframeSelection", position=p)
            keyframeNode.inputPaths.value = filesByType["videos"]
            if len(filesByType["videos"]) == 1:
-                newVideoNodeMessage = "New node '{}' added for the input video.".format(keyframeNode.getLabel())
+                newVideoNodeMessage = f"New node '{keyframeNode.getLabel()}' added for the input video."
            else:
-                newVideoNodeMessage = "New node '{}' added for a rig of {} synchronized cameras.".format(keyframeNode.getLabel(), len(filesByType["videos"]))
+                newVideoNodeMessage = f"New node '{keyframeNode.getLabel()}' added for a rig of {len(filesByType['videos'])} synchronized cameras."
            self.info.emit(
                Message(
                    "Video Input",
@@ -836,13 +836,13 @@ class Reconstruction(UIGraph):
                Message(
                    "Panorama XML",
                    "XML file declared on PanoramaInit node",
-                    "XML file '{}' set on node '{}'".format(','.join(filesByType["panoramaInfo"]), ','.join([n.getLabel() for n in panoramaInitNodes])),
+                    f"XML file '{','.join(filesByType['panoramaInfo'])}' set on node '{','.join([n.getLabel() for n in panoramaInitNodes])}'",
                ))
            else:
                self.error.emit(
                    Message(
                        "No PanoramaInit Node",
-                        "No PanoramaInit Node to set the Panorama file:\n'{}'.".format(','.join(filesByType["panoramaInfo"])),
+                        f"No PanoramaInit Node to set the Panorama file:\n'{','.join(filesByType['panoramaInfo'])}'.",
                        "",
                    ))

@@ -865,7 +865,7 @@ class Reconstruction(UIGraph):
            self.error.emit(
                Message(
                    "No Recognized Input File",
-                    "No recognized input file in the {} dropped files".format(len(filesByType["other"])),
+                    f"No recognized input file in the {len(filesByType['other'])} dropped files",
                    "Unknown file extensions: " + ', '.join(extensions)
                )
            )
@@ -974,7 +974,7 @@ class Reconstruction(UIGraph):
            # Retrieve the list of updated viewpoints and intrinsics
            views, intrinsics = cameraInitCopy.nodeDesc.buildIntrinsics(cameraInitCopy, additionalViews)
        except Exception as e:
-            logging.error("Error while building intrinsics: {}".format(str(e)))
+            logging.error(f"Error while building intrinsics: {str(e)}")
            raise
        finally:
            # Delete the duplicate
@@ -1008,7 +1008,7 @@ class Reconstruction(UIGraph):
        commandTitle = "Augment Reconstruction ({} Images)"

        if rebuild:
-            commandTitle = "Rebuild '{}' Intrinsics".format(cameraInit.label)
+            commandTitle = f"Rebuild '{cameraInit.label}' Intrinsics"

        # No additional views: early return
        if not views:
@@ -102,7 +102,7 @@ class QmlInstantEngine(QQmlApplicationEngine):

        # Make sure the file exists
        if not os.path.isfile(filename):
-            raise ValueError("addFile: file %s doesn't exist." % filename)
+            raise ValueError(f"addFile: file {filename} doesn't exist.")

        # Return if the file is already in our internal list
        if filename in self._watchedFiles:
@@ -135,7 +135,7 @@ class QmlInstantEngine(QQmlApplicationEngine):
        recursive -- if True, will search inside each subdirectories recursively.
        """
        if not os.path.isdir(dirname):
-            raise RuntimeError("addFilesFromDirectory : %s is not a valid directory." % dirname)
+            raise RuntimeError(f"addFilesFromDirectory : {dirname} is not a valid directory.")

        if recursive:
            for dirpath, dirnames, filenames in os.walk(dirname):
@@ -193,7 +193,7 @@ class QmlInstantEngine(QQmlApplicationEngine):
        QTimer.singleShot(200, lambda: self.addFile(filepath))

    def reload(self):
-        print("Reloading {}".format(self._sourceFile))
+        print(f"Reloading {self._sourceFile}")
        self.load(self._sourceFile)
