Mirror of https://github.com/alicevision/Meshroom.git (synced 2025-04-29 10:17:27 +02:00)

Commit 5ae65a8cfa: Merge remote-tracking branch 'origin/develop' into dev_ml
32 changed files with 3206 additions and 542 deletions
.github/stale.yml (vendored): 1 line changed

@@ -8,6 +8,7 @@ exemptLabels:
  - "feature request"
  - "scope:doc"
  - "new feature"
  - "bug"
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable

@@ -9,8 +9,10 @@ meshroom.setupEnvironment()

import meshroom.core.graph
from meshroom import multiview
from meshroom.core.desc import InitNode
import logging


parser = argparse.ArgumentParser(description='Launch the full photogrammetry or Panorama HDR pipeline.')
parser.add_argument('-i', '--input', metavar='SFM/FOLDERS/IMAGES', type=str, nargs='*',
                    default=[],

@@ -20,8 +22,8 @@ parser.add_argument('-I', '--inputRecursive', metavar='FOLDERS/IMAGES', type=str
                    default=[],
                    help='Input folders containing all images recursively.')

-parser.add_argument('-p', '--pipeline', metavar='photogrammetry/panoramaHdr/panoramaFisheyeHdr/cameraTracking/photogrammetryDraft/MG_FILE', type=str, default='photogrammetry',
-                    help='"photogrammetry", "panoramaHdr", "panoramaFisheyeHdr", "cameraTracking", "photogrammetryDraft" pipeline or a Meshroom file containing a custom pipeline to run on input images. '
+parser.add_argument('-p', '--pipeline', metavar='FILE.mg/' + '/'.join(meshroom.core.pipelineTemplates), type=str, default='photogrammetry',
+                    help='Template pipeline among those listed or a Meshroom file containing a custom pipeline to run on input images. '
                    'Requirements: the graph must contain one CameraInit node, '
                    'and one Publish node if --output is set.')

@@ -94,61 +96,38 @@ def getOnlyNodeOfType(g, nodeType):
    return nodes[0]


def getInitNode(g):
    """
    Helper function to get the Init node in the graph 'g' and raise an exception if there is no or
    multiple candidates.
    """
    nodes = g.findInitNodes()
    if len(nodes) == 0:
        raise RuntimeError("meshroom_batch requires an Init node in the pipeline.")
    elif len(nodes) > 1:
        raise RuntimeError("meshroom_batch requires exactly one Init node in the pipeline, {} found: {}"
                           .format(len(nodes), str(nodes)))
    return nodes[0]


if not args.input and not args.inputRecursive:
    print('Nothing to compute. You need to set --input or --inputRecursive.')
    sys.exit(1)

views, intrinsics = [], []
# Build image files list from inputImages arguments
filesByType = multiview.FilesByType()

hasSearchedForImages = False

if args.input:
    if len(args.input) == 1 and os.path.isfile(args.input[0]) and os.path.splitext(args.input[0])[-1] in ('.json', '.sfm'):
        # args.input is a sfmData file: setup pre-calibrated views and intrinsics
        from meshroom.nodes.aliceVision.CameraInit import readSfMData
        views, intrinsics = readSfMData(args.input[0])
    else:
        filesByType.extend(multiview.findFilesByTypeInFolder(args.input, recursive=False))
        hasSearchedForImages = True

if args.inputRecursive:
    filesByType.extend(multiview.findFilesByTypeInFolder(args.inputRecursive, recursive=True))
    hasSearchedForImages = True

if hasSearchedForImages and not filesByType.images:
    print("No image found")
    sys.exit(-1)

graph = multiview.Graph(name=args.pipeline)

with multiview.GraphModification(graph):
    # initialize photogrammetry pipeline
    if args.pipeline.lower() == "photogrammetry":
        # default photogrammetry pipeline
        multiview.photogrammetry(inputViewpoints=views, inputIntrinsics=intrinsics, output=args.output, graph=graph)
    elif args.pipeline.lower() == "panoramahdr":
        # default panorama Hdr pipeline
        multiview.panoramaHdr(inputViewpoints=views, inputIntrinsics=intrinsics, output=args.output, graph=graph)
    elif args.pipeline.lower() == "panoramafisheyehdr":
        # default panorama Fisheye Hdr pipeline
        multiview.panoramaFisheyeHdr(inputViewpoints=views, inputIntrinsics=intrinsics, output=args.output, graph=graph)
    elif args.pipeline.lower() == "cameratracking":
        # default panorama Fisheye Hdr pipeline
        multiview.cameraTracking(inputViewpoints=views, inputIntrinsics=intrinsics, output=args.output, graph=graph)
    # initialize template pipeline
    loweredPipelineTemplates = dict((k.lower(), v) for k, v in meshroom.core.pipelineTemplates.items())
    if args.pipeline.lower() in loweredPipelineTemplates:
        graph.load(loweredPipelineTemplates[args.pipeline.lower()], setupProjectFile=False)
    else:
        # custom pipeline
-       graph.load(args.pipeline)
-       # graph.update()
+       graph.load(args.pipeline, setupProjectFile=False)

    cameraInit = getOnlyNodeOfType(graph, 'CameraInit')
    # reset graph inputs
    cameraInit.viewpoints.resetValue()
    cameraInit.intrinsics.resetValue()
    # add views and intrinsics (if any) read from args.input
    cameraInit.viewpoints.extend(views)
    cameraInit.intrinsics.extend(intrinsics)
    # get init node and initialize it
    initNode = getInitNode(graph)
    initNode.nodeDesc.initialize(initNode, args.input, args.inputRecursive)

    if not graph.canComputeLeaves:
        raise RuntimeError("Graph cannot be computed. Check for compatibility issues.")

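The pipeline selection shown above is what meshroom_batch now exposes on the command line: the argument is looked up case-insensitively in meshroom.core.pipelineTemplates, and anything that does not match a template name is treated as a path to a .mg file. An indicative invocation, with placeholder paths (the flags --input, --pipeline and --output are the ones declared in the script above):

    # Template by name (case-insensitive lookup in meshroom.core.pipelineTemplates)
    meshroom_batch --input /path/to/images --pipeline panoramaHdr --output /path/to/results

    # Custom pipeline loaded from a Meshroom project file
    meshroom_batch --input /path/to/images --pipeline /path/to/myCustomPipeline.mg --output /path/to/results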
@@ -160,11 +139,6 @@ with multiview.GraphModification(graph):
        publish = getOnlyNodeOfType(graph, 'Publish')
        publish.output.value = args.output

    if filesByType.images:
        views, intrinsics = cameraInit.nodeDesc.buildIntrinsics(cameraInit, filesByType.images)
        cameraInit.viewpoints.value = views
        cameraInit.intrinsics.value = intrinsics

    if args.overrides:
        import io
        import json

@@ -35,6 +35,7 @@ cacheFolderName = 'MeshroomCache'
defaultCacheFolder = os.environ.get('MESHROOM_CACHE', os.path.join(tempfile.gettempdir(), cacheFolderName))
nodesDesc = {}
submitters = {}
pipelineTemplates = {}


def hashValue(value):

@@ -256,7 +257,7 @@ def loadAllNodes(folder):
    nodeTypes = loadNodes(folder, package)
    for nodeType in nodeTypes:
        registerNodeType(nodeType)
-   logging.debug('Plugins loaded: ', ', '.join([nodeType.__name__ for nodeType in nodeTypes]))
+   logging.debug('Nodes loaded [{}]: {}'.format(package, ', '.join([nodeType.__name__ for nodeType in nodeTypes])))


def registerSubmitter(s):

@@ -270,6 +271,12 @@ def loadSubmitters(folder, packageName):
    return loadPlugins(folder, packageName, BaseSubmitter)


def loadPipelineTemplates(folder):
    global pipelineTemplates
    for file in os.listdir(folder):
        if file.endswith(".mg") and file not in pipelineTemplates:
            pipelineTemplates[os.path.splitext(file)[0]] = os.path.join(folder, file)

meshroomFolder = os.path.dirname(os.path.dirname(__file__))

additionalNodesPath = os.environ.get("MESHROOM_NODES_PATH", "").split(os.pathsep)

@@ -288,3 +295,12 @@ subs = loadSubmitters(os.environ.get("MESHROOM_SUBMITTERS_PATH", meshroomFolder)

for sub in subs:
    registerSubmitter(sub())

# Load pipeline templates: check in the default folder and any folder the user might have
# added to the environment variable
additionalPipelinesPath = os.environ.get("MESHROOM_PIPELINE_TEMPLATES_PATH", "").split(os.pathsep)
additionalPipelinesPath = [i for i in additionalPipelinesPath if i]
pipelineTemplatesFolders = [os.path.join(meshroomFolder, 'pipelines')] + additionalPipelinesPath

for f in pipelineTemplatesFolders:
    loadPipelineTemplates(f)

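Taken together, the additions above build the pipelineTemplates registry at import time: every *.mg file found in meshroom/pipelines, plus any folder listed in MESHROOM_PIPELINE_TEMPLATES_PATH, is registered under its basename. A minimal sketch, assuming a Meshroom checkout on PYTHONPATH (the extra template folder path is a placeholder), of inspecting that registry:

    import os
    # Optional extra template folders, read os.pathsep-separated by the code above.
    os.environ.setdefault("MESHROOM_PIPELINE_TEMPLATES_PATH", "/path/to/my/templates")  # placeholder

    import meshroom
    meshroom.setupEnvironment()
    import meshroom.core

    # pipelineTemplates maps a template name (file basename without ".mg") to its path.
    for name, path in sorted(meshroom.core.pipelineTemplates.items()):
        print("{}: {}".format(name, path))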
@@ -1,11 +1,13 @@
#!/usr/bin/env python
# coding:utf-8
import copy
import os
import re
import weakref
import types
import logging

from string import Template
from meshroom.common import BaseObject, Property, Variant, Signal, ListModel, DictModel, Slot
from meshroom.core import desc, pyCompatibility, hashValue

@@ -37,7 +39,7 @@ def attributeFactory(description, value, isOutput, node, root=None, parent=None)
class Attribute(BaseObject):
    """
    """
-   stringIsLinkRe = re.compile('^\{[A-Za-z]+[A-Za-z0-9_.]*\}$')
+   stringIsLinkRe = re.compile(r'^\{[A-Za-z]+[A-Za-z0-9_.]*\}$')

    def __init__(self, node, attributeDesc, isOutput, root=None, parent=None):
        """

@@ -139,7 +141,9 @@ class Attribute(BaseObject):
        self.enabledChanged.emit()

    def _get_value(self):
-       return self.getLinkParam().value if self.isLink else self._value
+       if self.isLink:
+           return self.getLinkParam().value
+       return self._value

    def _set_value(self, value):
        if self._value == value:

@@ -259,13 +263,18 @@ class Attribute(BaseObject):
            return self.defaultValue()
        return self._value

    def getEvalValue(self):
        if isinstance(self.value, pyCompatibility.basestring):
            return Template(self.value).safe_substitute(os.environ)
        return self.value

    def getValueStr(self):
        if isinstance(self.attributeDesc, desc.ChoiceParam) and not self.attributeDesc.exclusive:
            assert(isinstance(self.value, pyCompatibility.Sequence) and not isinstance(self.value, pyCompatibility.basestring))
-           return self.attributeDesc.joinChar.join(self.value)
+           return self.attributeDesc.joinChar.join(self.getEvalValue())
        if isinstance(self.attributeDesc, (desc.StringParam, desc.File)):
-           return '"{}"'.format(self.value)
-       return str(self.value)
+           return '"{}"'.format(self.getEvalValue())
+       return str(self.getEvalValue())

    def defaultValue(self):
        if isinstance(self.desc.value, types.FunctionType):

@@ -298,6 +307,8 @@ class Attribute(BaseObject):
    desc = Property(desc.Attribute, lambda self: self.attributeDesc, constant=True)
    valueChanged = Signal()
    value = Property(Variant, _get_value, _set_value, notify=valueChanged)
    valueStr = Property(Variant, getValueStr, notify=valueChanged)
    evalValue = Property(Variant, getEvalValue, notify=valueChanged)
    isOutput = Property(bool, isOutput.fget, constant=True)
    isLinkChanged = Signal()
    isLink = Property(bool, isLink.fget, notify=isLinkChanged)

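The new getEvalValue above resolves ${VAR} placeholders in string attribute values against the process environment via string.Template.safe_substitute, which is what lets node defaults such as '${ALICEVISION_SENSOR_DB}' (seen later in this commit) stay symbolic until the command line is built. A small standalone sketch of the same mechanism, not Meshroom's own class, with an example path:

    import os
    from string import Template

    def eval_value(value):
        # Resolve ${VAR} placeholders against the environment; unknown
        # variables are left untouched thanks to safe_substitute().
        if isinstance(value, str):
            return Template(value).safe_substitute(os.environ)
        return value

    os.environ["ALICEVISION_SENSOR_DB"] = "/opt/aliceVision/share/cameraSensors.db"  # example value
    print(eval_value("${ALICEVISION_SENSOR_DB}"))   # -> /opt/aliceVision/share/cameraSensors.db
    print(eval_value("${UNDEFINED_VAR}/foo"))       # -> ${UNDEFINED_VAR}/foo (left as-is)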
@@ -1,12 +1,13 @@
from meshroom.common import BaseObject, Property, Variant, VariantList, JSValue
from meshroom.core import pyCompatibility

from enum import Enum  # available by default in python3. For python2: "pip install enum34"
import math
import os
import psutil
import ast
import distutils.util

import shlex

class Attribute(BaseObject):
    """

@@ -505,7 +506,7 @@ class CommandLineNode(Node):
            chunk.saveStatusFile()
            print(' - commandLine: {}'.format(cmd))
            print(' - logFile: {}'.format(chunk.logFile))
-           chunk.subprocess = psutil.Popen(cmd, stdout=logF, stderr=logF, shell=True)
+           chunk.subprocess = psutil.Popen(shlex.split(cmd), stdout=logF, stderr=logF)

            # store process static info into the status file
            # chunk.status.env = node.proc.environ()
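The Popen change above drops shell=True and tokenizes the command with shlex.split, so the node process is spawned directly instead of going through a shell. A minimal illustration of what that tokenization does (this is a sketch, not the Meshroom code itself; the command string is only an example):

    import shlex

    cmd = 'aliceVision_cameraInit --allowSingleView 1 --output "/tmp/out dir/cameraInit.sfm"'

    # shlex.split tokenizes the string the way a POSIX shell would,
    # keeping the quoted argument as a single token.
    args = shlex.split(cmd)
    print(args)
    # ['aliceVision_cameraInit', '--allowSingleView', '1', '--output', '/tmp/out dir/cameraInit.sfm']

    # Passing this list to Popen (as the diff does with psutil.Popen) avoids
    # shell quoting surprises and shell injection, since no shell is involved.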
@@ -526,3 +527,55 @@ class CommandLineNode(Node):
        finally:
            chunk.subprocess = None


# Test abstract node
class InitNode:
    def __init__(self):
        pass

    def initialize(self, node, inputs, recursiveInputs):
        """
        Initialize the attributes that are needed for a node to start running.

        Args:
            node (Node): the node whose attributes must be initialized
            inputs (list): the user-provided list of input files/directories
            recursiveInputs (list): the user-provided list of input directories to search recursively for images
        """
        pass

    def resetAttributes(self, node, attributeNames):
        """
        Reset the values of the provided attributes for a node.

        Args:
            node (Node): the node whose attributes are to be reset
            attributeNames (list): the list containing the names of the attributes to reset
        """
        for attrName in attributeNames:
            if node.hasAttribute(attrName):
                node.attribute(attrName).resetValue()

    def extendAttributes(self, node, attributesDict):
        """
        Extend the values of the provided attributes for a node.

        Args:
            node (Node): the node whose attributes are to be extended
            attributesDict (dict): the dictionary containing the attributes' names (as keys) and the values to extend with
        """
        for attr in attributesDict.keys():
            if node.hasAttribute(attr):
                node.attribute(attr).extend(attributesDict[attr])

    def setAttributes(self, node, attributesDict):
        """
        Set the values of the provided attributes for a node.

        Args:
            node (Node): the node whose attributes are to be extended
            attributesDict (dict): the dictionary containing the attributes' names (as keys) and the values to set
        """
        for attr in attributesDict:
            if node.hasAttribute(attr):
                node.attribute(attr).value = attributesDict[attr]

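The InitNode mixin above gives node descriptions a uniform entry point for meshroom_batch: getInitNode finds the single node whose nodeDesc inherits InitNode and calls initialize() with the raw --input / --inputRecursive values. A hedged sketch of what a custom description could look like; the class name, executable and the "viewpoints" attribute are hypothetical:

    from meshroom.core import desc

    class MyImageLoader(desc.CommandLineNode, desc.InitNode):
        """Hypothetical node description: inheriting InitNode lets meshroom_batch
        route --input / --inputRecursive to this node through initialize()."""
        commandLine = 'my_image_loader {allParams}'  # hypothetical executable

        def __init__(self):
            super(MyImageLoader, self).__init__()

        def initialize(self, node, inputs, recursiveInputs):
            # Reset the attributes that will be (re)filled from the command line,
            # then store the raw input paths; a real node would expand folders to
            # image files the way CameraInit.initialize() does later in this commit.
            self.resetAttributes(node, ["viewpoints"])
            self.extendAttributes(node, {"viewpoints": [{"path": p} for p in inputs]})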
@@ -236,8 +236,6 @@ class Graph(BaseObject):
    @property
    def fileFeatures(self):
        """ Get loaded file supported features based on its version. """
-       if not self._filepath:
-           return []
        return Graph.IO.getFeaturesForVersion(self.header.get(Graph.IO.Keys.FileVersion, "0.0"))

    @Slot(str)

@@ -382,7 +380,7 @@ class Graph(BaseObject):
            node, edges = self.copyNode(srcNode, withEdges=False)
            duplicate = self.addNode(node)
            duplicateEdges.update(edges)
-           duplicates[srcNode] = duplicate  # original node to duplicate map
+           duplicates.setdefault(srcNode, []).append(duplicate)

        # re-create edges taking into account what has been duplicated
        for attr, linkExpression in duplicateEdges.items():

@@ -390,8 +388,10 @@ class Graph(BaseObject):
            # get source node and attribute name
            edgeSrcNodeName, edgeSrcAttrName = link.split(".", 1)
            edgeSrcNode = self.node(edgeSrcNodeName)
-           # if the edge's source node has been duplicated, use the duplicate; otherwise use the original node
-           edgeSrcNode = duplicates.get(edgeSrcNode, edgeSrcNode)
+           # if the edge's source node has been duplicated (the key exists in the dictionary),
+           # use the duplicate; otherwise use the original node
+           if edgeSrcNode in duplicates:
+               edgeSrcNode = duplicates.get(edgeSrcNode)[0]
            self.addEdge(edgeSrcNode.attribute(edgeSrcAttrName), attr)

        return duplicates

@@ -547,6 +547,14 @@ class Graph(BaseObject):
        nodes = [n for n in self._nodes.values() if n.nodeType == nodeType]
        return self.sortNodesByIndex(nodes) if sortedByIndex else nodes

    def findInitNodes(self):
        """
        Returns:
            list[Node]: the list of Init nodes (nodes inheriting from InitNode)
        """
        nodes = [n for n in self._nodes.values() if isinstance(n.nodeDesc, meshroom.core.desc.InitNode)]
        return nodes

    def findNodeCandidates(self, nodeNameExpr):
        pattern = re.compile(nodeNameExpr)
        return [v for k, v in self._nodes.objects.items() if pattern.match(k)]

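The duplication bookkeeping now maps each original node to a list of duplicates (setdefault + append) rather than to a single node, which is why the edge re-wiring above picks duplicates.get(edgeSrcNode)[0]. A tiny standalone sketch of that mapping pattern, using plain strings instead of Meshroom Node objects:

    # One original -> several duplicates, as in duplicateNodes above.
    duplicates = {}

    def record_duplicate(original, duplicate):
        # setdefault creates the list on first use; later duplicates of the
        # same original are appended to it.
        duplicates.setdefault(original, []).append(duplicate)

    record_duplicate("CameraInit_1", "CameraInit_2")
    record_duplicate("CameraInit_1", "CameraInit_3")

    source = "CameraInit_1"
    if source in duplicates:
        # Re-wire edges to the first duplicate, mirroring duplicates.get(edgeSrcNode)[0]
        source = duplicates[source][0]
    print(source)  # CameraInit_2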
@@ -145,243 +145,6 @@ def findFilesByTypeInFolder(folder, recursive=False):
    return output


def panoramaHdr(inputImages=None, inputViewpoints=None, inputIntrinsics=None, output='', graph=None):
    """
    Create a new Graph with a Panorama HDR pipeline.

    Args:
        inputImages (list of str, optional): list of image file paths
        inputViewpoints (list of Viewpoint, optional): list of Viewpoints
        output (str, optional): the path to export reconstructed model to

    Returns:
        Graph: the created graph
    """
    if not graph:
        graph = Graph('PanoramaHDR')
    with GraphModification(graph):
        nodes = panoramaHdrPipeline(graph)
        cameraInit = nodes[0]
        if inputImages:
            cameraInit.viewpoints.extend([{'path': image} for image in inputImages])
        if inputViewpoints:
            cameraInit.viewpoints.extend(inputViewpoints)
        if inputIntrinsics:
            cameraInit.intrinsics.extend(inputIntrinsics)

        if output:
            imageProcessing = nodes[-1]
            graph.addNewNode('Publish', output=output, inputFiles=[imageProcessing.outputImages])

    return graph

def panoramaFisheyeHdr(inputImages=None, inputViewpoints=None, inputIntrinsics=None, output='', graph=None):
    if not graph:
        graph = Graph('PanoramaFisheyeHDR')
    with GraphModification(graph):
        panoramaHdr(inputImages, inputViewpoints, inputIntrinsics, output, graph)
        for panoramaInit in graph.nodesOfType("PanoramaInit"):
            panoramaInit.attribute("useFisheye").value = True
        for featureExtraction in graph.nodesOfType("FeatureExtraction"):
            # when using fisheye images, 'sift' performs better than 'dspsift'
            featureExtraction.attribute("describerTypes").value = ['sift']
            # when using fisheye images, the overlap between images can be small
            # and thus requires many features to get enough correspondences for cameras estimation
            featureExtraction.attribute("describerPreset").value = 'high'
    return graph

def panoramaHdrPipeline(graph):
    """
    Instantiate an PanoramaHDR pipeline inside 'graph'.
    Args:
        graph (Graph/UIGraph): the graph in which nodes should be instantiated

    Returns:
        list of Node: the created nodes
    """
    cameraInit = graph.addNewNode('CameraInit')
    try:
        # fisheye4 does not work well in the ParoramaEstimation, so here we avoid to use it.
        cameraInit.attribute('allowedCameraModels').value.remove("fisheye4")
    except ValueError:
        pass

    panoramaPrepareImages = graph.addNewNode('PanoramaPrepareImages',
                                             input=cameraInit.output)

    ldr2hdrSampling = graph.addNewNode('LdrToHdrSampling',
                                       input=panoramaPrepareImages.output)

    ldr2hdrCalibration = graph.addNewNode('LdrToHdrCalibration',
                                          input=ldr2hdrSampling.input,
                                          userNbBrackets=ldr2hdrSampling.userNbBrackets,
                                          byPass=ldr2hdrSampling.byPass,
                                          channelQuantizationPower=ldr2hdrSampling.channelQuantizationPower,
                                          samples=ldr2hdrSampling.output)

    ldr2hdrMerge = graph.addNewNode('LdrToHdrMerge',
                                    input=ldr2hdrCalibration.input,
                                    userNbBrackets=ldr2hdrCalibration.userNbBrackets,
                                    byPass=ldr2hdrCalibration.byPass,
                                    channelQuantizationPower=ldr2hdrCalibration.channelQuantizationPower,
                                    response=ldr2hdrCalibration.response)

    featureExtraction = graph.addNewNode('FeatureExtraction',
                                         input=ldr2hdrMerge.outSfMData,
                                         describerQuality='high')

    panoramaInit = graph.addNewNode('PanoramaInit',
                                    input=featureExtraction.input,
                                    dependency=[featureExtraction.output]  # Workaround for tractor submission with a fake dependency
                                    )

    imageMatching = graph.addNewNode('ImageMatching',
                                     input=panoramaInit.outSfMData,
                                     featuresFolders=[featureExtraction.output],
                                     method='FrustumOrVocabularyTree')

    featureMatching = graph.addNewNode('FeatureMatching',
                                       input=imageMatching.input,
                                       featuresFolders=imageMatching.featuresFolders,
                                       imagePairsList=imageMatching.output,
                                       describerTypes=featureExtraction.describerTypes)

    panoramaEstimation = graph.addNewNode('PanoramaEstimation',
                                          input=featureMatching.input,
                                          featuresFolders=featureMatching.featuresFolders,
                                          matchesFolders=[featureMatching.output],
                                          describerTypes=featureMatching.describerTypes)

    panoramaOrientation = graph.addNewNode('SfMTransform',
                                           input=panoramaEstimation.output,
                                           method='manual')

    panoramaWarping = graph.addNewNode('PanoramaWarping',
                                       input=panoramaOrientation.output)

    panoramaSeams = graph.addNewNode('PanoramaSeams',
                                     input=panoramaWarping.input,
                                     warpingFolder=panoramaWarping.output
                                     )

    panoramaCompositing = graph.addNewNode('PanoramaCompositing',
                                           input=panoramaSeams.input,
                                           warpingFolder=panoramaSeams.warpingFolder,
                                           labels=panoramaSeams.output
                                           )

    panoramaMerging = graph.addNewNode('PanoramaMerging',
                                       input=panoramaCompositing.input,
                                       compositingFolder=panoramaCompositing.output
                                       )

    imageProcessing = graph.addNewNode('ImageProcessing',
                                       input=panoramaMerging.outputPanorama,
                                       fixNonFinite=True,
                                       fillHoles=True,
                                       extension='exr')

    return [
        cameraInit,
        featureExtraction,
        panoramaInit,
        imageMatching,
        featureMatching,
        panoramaEstimation,
        panoramaOrientation,
        panoramaWarping,
        panoramaSeams,
        panoramaCompositing,
        panoramaMerging,
        imageProcessing,
    ]


def photogrammetry(inputImages=list(), inputViewpoints=list(), inputIntrinsics=list(), output='', graph=None):
    """
    Create a new Graph with a complete photogrammetry pipeline.

    Args:
        inputImages (list of str, optional): list of image file paths
        inputViewpoints (list of Viewpoint, optional): list of Viewpoints
        output (str, optional): the path to export reconstructed model to

    Returns:
        Graph: the created graph
    """
    if not graph:
        graph = Graph('Photogrammetry')
    with GraphModification(graph):
        sfmNodes, mvsNodes = photogrammetryPipeline(graph)
        cameraInit = sfmNodes[0]
        cameraInit.viewpoints.extend([{'path': image} for image in inputImages])
        cameraInit.viewpoints.extend(inputViewpoints)
        cameraInit.intrinsics.extend(inputIntrinsics)

        if output:
            texturing = mvsNodes[-1]
            graph.addNewNode('Publish', output=output, inputFiles=[texturing.outputMesh,
                                                                   texturing.outputMaterial,
                                                                   texturing.outputTextures])

    return graph


def photogrammetryPipeline(graph):
    """
    Instantiate a complete photogrammetry pipeline inside 'graph'.

    Args:
        graph (Graph/UIGraph): the graph in which nodes should be instantiated

    Returns:
        list of Node: the created nodes
    """
    sfmNodes = sfmPipeline(graph)
    mvsNodes = mvsPipeline(graph, sfmNodes[-1])

    # store current pipeline version in graph header
    graph.header.update({'pipelineVersion': __version__})

    return sfmNodes, mvsNodes


def sfmPipeline(graph):
    """
    Instantiate a SfM pipeline inside 'graph'.
    Args:
        graph (Graph/UIGraph): the graph in which nodes should be instantiated

    Returns:
        list of Node: the created nodes
    """
    cameraInit = graph.addNewNode('CameraInit')

    featureExtraction = graph.addNewNode('FeatureExtraction',
                                         input=cameraInit.output)
    imageMatching = graph.addNewNode('ImageMatching',
                                     input=featureExtraction.input,
                                     featuresFolders=[featureExtraction.output])
    featureMatching = graph.addNewNode('FeatureMatching',
                                       input=imageMatching.input,
                                       featuresFolders=imageMatching.featuresFolders,
                                       imagePairsList=imageMatching.output,
                                       describerTypes=featureExtraction.describerTypes)
    structureFromMotion = graph.addNewNode('StructureFromMotion',
                                           input=featureMatching.input,
                                           featuresFolders=featureMatching.featuresFolders,
                                           matchesFolders=[featureMatching.output],
                                           describerTypes=featureMatching.describerTypes)
    return [
        cameraInit,
        featureExtraction,
        imageMatching,
        featureMatching,
        structureFromMotion
    ]


def mvsPipeline(graph, sfm=None):
    """
    Instantiate a MVS pipeline inside 'graph'.

@@ -470,134 +233,3 @@ def sfmAugmentation(graph, sourceSfm, withMVS=False):
        mvsNodes = mvsPipeline(graph, structureFromMotion)

    return sfmNodes, mvsNodes


def cameraTrackingPipeline(graph, sourceSfm=None):
    """
    Instantiate a camera tracking pipeline inside 'graph'.

    Args:
        graph (Graph/UIGraph): the graph in which nodes should be instantiated

    Returns:
        list of Node: the created nodes
    """

    with GraphModification(graph):
        if sourceSfm is None:
            cameraInitT, featureExtractionT, imageMatchingT, featureMatchingT, structureFromMotionT = sfmPipeline(graph)
        else:
            sfmNodes, _ = sfmAugmentation(graph, sourceSfm)
            cameraInitT, featureExtractionT, imageMatchingT, featureMatchingT, structureFromMotionT = sfmNodes

        distortionCalibrationT = graph.addNewNode('DistortionCalibration',
                                                  input=cameraInitT.output)

        graph.removeEdge(featureMatchingT.input)
        graph.addEdge(distortionCalibrationT.outSfMData, featureMatchingT.input)

        imageMatchingT.attribute("nbMatches").value = 5  # voctree nb matches
        imageMatchingT.attribute("nbNeighbors").value = 10

        structureFromMotionT.attribute("minNumberOfMatches").value = 0
        structureFromMotionT.attribute("minInputTrackLength").value = 5
        structureFromMotionT.attribute("minNumberOfObservationsForTriangulation").value = 3
        structureFromMotionT.attribute("minAngleForTriangulation").value = 1.0
        structureFromMotionT.attribute("minAngleForLandmark").value = 0.5

        exportAnimatedCameraT = graph.addNewNode('ExportAnimatedCamera', input=structureFromMotionT.output)
        if sourceSfm:
            graph.addEdge(sourceSfm.output, exportAnimatedCameraT.sfmDataFilter)

        # store current pipeline version in graph header
        graph.header.update({'pipelineVersion': __version__})

        return [
            cameraInitT,
            featureExtractionT,
            imageMatchingT,
            featureMatchingT,
            distortionCalibrationT,
            structureFromMotionT,
            exportAnimatedCameraT,
        ]


def cameraTracking(inputImages=list(), inputViewpoints=list(), inputIntrinsics=list(), output='', graph=None):
    if not graph:
        graph = Graph('Camera Tracking')
    with GraphModification(graph):
        trackingNodes = cameraTrackingPipeline(graph)
        cameraInit = trackingNodes[0]
        cameraInit.viewpoints.extend([{'path': image} for image in inputImages])
        cameraInit.viewpoints.extend(inputViewpoints)
        cameraInit.intrinsics.extend(inputIntrinsics)

        if output:
            exportNode = trackingNodes[-1]
            graph.addNewNode('Publish', output=output, inputFiles=[exportNode.output])

    return graph


def photogrammetryAndCameraTracking(inputImages=list(), inputViewpoints=list(), inputIntrinsics=list(), output='', graph=None):
    if not graph:
        graph = Graph('Photogrammetry And Camera Tracking')
    with GraphModification(graph):
        cameraInit, featureExtraction, imageMatching, featureMatching, structureFromMotion = sfmPipeline(graph)

        cameraInitT, featureExtractionT, imageMatchingMultiT, featureMatchingT, distortionCalibrationT, structureFromMotionT, exportAnimatedCameraT = cameraTrackingPipeline(graph, structureFromMotion)

        cameraInit.viewpoints.extend([{'path': image} for image in inputImages])
        cameraInit.viewpoints.extend(inputViewpoints)
        cameraInit.intrinsics.extend(inputIntrinsics)

        if output:
            graph.addNewNode('Publish', output=output, inputFiles=[exportAnimatedCameraT.output])

    return graph


def photogrammetryDraft(inputImages=None, inputViewpoints=None, inputIntrinsics=None, output='', graph=None):
    """
    Create a new Graph with a complete photogrammetry pipeline without requiring a NVIDIA CUDA video card. Something also named Draft Meshing.
    More information on that pipeline https://github.com/alicevision/meshroom/wiki/Draft-Meshing

    Args:
        inputImages (list of str, optional): list of image file paths
        inputViewpoints (list of Viewpoint, optional): list of Viewpoints
        output (str, optional): the path to export reconstructed model to

    Returns:
        Graph: the created graph
    """
    if not graph:
        graph = Graph('PhotogrammetryDraft')
    with GraphModification(graph):
        sfmNodes = sfmPipeline(graph)
        sfmNode = sfmNodes[-1]

        meshing = graph.addNewNode('Meshing',
                                   input=sfmNode.output)

        meshFiltering = graph.addNewNode('MeshFiltering',
                                         inputMesh=meshing.outputMesh)
        texturing = graph.addNewNode('Texturing',
                                     input=meshing.output,
                                     inputMesh=meshFiltering.outputMesh)

        cameraInit = sfmNodes[0]

        if inputImages:
            cameraInit.viewpoints.extend([{'path': image} for image in inputImages])
        if inputViewpoints:
            cameraInit.viewpoints.extend(inputViewpoints)
        if inputIntrinsics:
            cameraInit.intrinsics.extend(inputIntrinsics)

        if output:
            graph.addNewNode('Publish', output=output, inputFiles=[texturing.outputMesh,
                                                                   texturing.outputMaterial,
                                                                   texturing.outputTextures])

    return graph

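The two hunks above remove the hard-coded panorama, camera-tracking and draft-photogrammetry builders from multiview; their graphs now ship as the .mg templates added later in this commit (cameraTracking.mg, panoramaHdr.mg, panoramaFisheyeHdr.mg). A hedged sketch of obtaining an equivalent graph through the new template registry instead of the removed helpers, assuming Meshroom's environment is set up as in meshroom_batch:

    # Rough equivalent of the removed multiview.panoramaHdr(...) helper.
    import meshroom
    meshroom.setupEnvironment()

    import meshroom.core
    from meshroom import multiview

    graph = multiview.Graph(name='panoramaHdr')
    with multiview.GraphModification(graph):
        graph.load(meshroom.core.pipelineTemplates['panoramaHdr'], setupProjectFile=False)
        # The CameraInit node is then fed with images, e.g. through
        # CameraInit.initialize() / buildIntrinsics(), as meshroom_batch does.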
@@ -8,7 +8,7 @@ import tempfile
import logging

from meshroom.core import desc, Version

from meshroom.multiview import FilesByType, findFilesByTypeInFolder

Viewpoint = [
    desc.IntParam(name="viewId", label="Id", description="Image UID", value=-1, uid=[0], range=None),

@@ -119,7 +119,8 @@ def readSfMData(sfmFile):

    return views, intrinsics

-class CameraInit(desc.CommandLineNode):
+class CameraInit(desc.CommandLineNode, desc.InitNode):
    commandLine = 'aliceVision_cameraInit {allParams} --allowSingleView 1'  # don't throw an error if there is only one image

    size = desc.DynamicNodeSize('viewpoints')

@@ -159,7 +160,7 @@ The metadata needed are:
            name='sensorDatabase',
            label='Sensor Database',
            description='''Camera sensor width database path.''',
-           value=os.environ.get('ALICEVISION_SENSOR_DB', ''),
+           value='${ALICEVISION_SENSOR_DB}',
            uid=[],
        ),
        desc.FloatParam(

@@ -250,6 +251,35 @@ The metadata needed are:
        ),
    ]

    def __init__(self):
        super(CameraInit, self).__init__()

    def initialize(self, node, inputs, recursiveInputs):
        # Reset graph inputs
        self.resetAttributes(node, ["viewpoints", "intrinsics"])

        filesByType = FilesByType()
        searchedForImages = False

        if recursiveInputs:
            filesByType.extend(findFilesByTypeInFolder(recursiveInputs, recursive=True))
            searchedForImages = True

        # Add views and intrinsics from a file if it was provided, or look for the images
        if len(inputs) == 1 and os.path.isfile(inputs[0]) and os.path.splitext(inputs[0])[-1] in ('.json', '.sfm'):
            views, intrinsics = readSfMData(inputs[0])
            self.extendAttributes(node, {"viewpoints": views, "intrinsics": intrinsics})
        else:
            filesByType.extend(findFilesByTypeInFolder(inputs, recursive=False))
            searchedForImages = True

        # If there was no input file, check that the directories do contain images
        if searchedForImages and not filesByType.images:
            raise ValueError("No valid input file or no image in the provided directories")

        views, intrinsics = self.buildIntrinsics(node, filesByType.images)
        self.setAttributes(node, {"viewpoints": views, "intrinsics": intrinsics})

    def upgradeAttributeValues(self, attrValues, fromVersion):

        # Starting with version 6, the principal point is now relative to the image center

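The sensorDatabase change above (repeated for the vocabulary-tree parameters in the following node descriptions) swaps an os.environ.get() lookup done at import time for a symbolic placeholder that survives in saved .mg files and is resolved per machine through Attribute.getEvalValue(). A hedged sketch of the declaration-side pattern, using only the desc.File arguments shown in this diff:

    from meshroom.core import desc

    sensorDatabaseParam = desc.File(
        name='sensorDatabase',
        label='Sensor Database',
        description='Camera sensor width database path.',
        value='${ALICEVISION_SENSOR_DB}',  # resolved against the environment at evaluation time
        uid=[],
    )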
@@ -125,7 +125,7 @@ class CameraLocalization(desc.CommandLineNode):
            name='voctree',
            label='Voctree',
            description='''[voctree] Filename for the vocabulary tree''',
-           value=os.environ.get('ALICEVISION_VOCTREE', ''),
+           value='${ALICEVISION_VOCTREE}',
            uid=[0],
        ),
        desc.File(

@@ -109,7 +109,7 @@ class CameraRigCalibration(desc.CommandLineNode):
            name='voctree',
            label='Voctree',
            description='''[voctree] Filename for the vocabulary tree''',
-           value=os.environ.get('ALICEVISION_VOCTREE', ''),
+           value='${ALICEVISION_VOCTREE}',
            uid=[0],
        ),
        desc.File(

@@ -116,7 +116,7 @@ class CameraRigLocalization(desc.CommandLineNode):
            name='voctree',
            label='Voctree',
            description='''[voctree] Filename for the vocabulary tree''',
-           value=os.environ.get('ALICEVISION_VOCTREE', ''),
+           value='${ALICEVISION_VOCTREE}',
            uid=[0],
        ),
        desc.File(

@@ -74,7 +74,7 @@ If images have known poses, use frustum intersection else use VocabularuTree.
            name='tree',
            label='Voc Tree: Tree',
            description='Input name for the vocabulary tree file.',
-           value=os.environ.get('ALICEVISION_VOCTREE', ''),
+           value='${ALICEVISION_VOCTREE}',
            uid=[],
            enabled=lambda node: 'VocabularyTree' in node.method.value,
        ),

@@ -65,7 +65,7 @@ Thanks to this node, the FeatureMatching node will only compute the matches betw
            name='tree',
            label='Voc Tree: Tree',
            description='Input name for the vocabulary tree file.',
-           value=os.environ.get('ALICEVISION_VOCTREE', ''),
+           value='${ALICEVISION_VOCTREE}',
            uid=[],
            enabled=lambda node: 'VocabularyTree' in node.method.value,
        ),

@@ -289,6 +289,15 @@ Convert or apply filtering to the input images.
            exclusive=True,
            uid=[0],
        ),
        desc.ChoiceParam(
            name='outputColorSpace',
            label='Output Color Space',
            description='Allows you to choose the color space of the output image.',
            value='AUTO',
            values=['AUTO', 'sRGB', 'Linear', 'ACES', 'ACEScg'],
            exclusive=True,
            uid=[0],
        ),
        desc.ChoiceParam(
            name='storageDataType',
            label='Storage Data Type for EXR output',

@@ -95,14 +95,14 @@ You can extract frames at regular interval by configuring only the min/maxFrameS
            name='sensorDbPath',
            label='Sensor Db Path',
            description='''Camera sensor width database path.''',
-           value=os.environ.get('ALICEVISION_SENSOR_DB', ''),
+           value='${ALICEVISION_SENSOR_DB}',
            uid=[0],
        ),
        desc.File(
            name='voctreePath',
            label='Voctree Path',
            description='''Vocabulary tree path.''',
-           value=os.environ.get('ALICEVISION_VOCTREE', ''),
+           value='${ALICEVISION_VOCTREE}',
            uid=[0],
        ),
        desc.BoolParam(

meshroom/pipelines/cameraTracking.mg (new file, 278 lines)

@@ -0,0 +1,278 @@
{
|
||||
"header": {
|
||||
"pipelineVersion": "2.2",
|
||||
"releaseVersion": "2021.1.0",
|
||||
"fileVersion": "1.1",
|
||||
"nodesVersions": {
|
||||
"ExportAnimatedCamera": "2.0",
|
||||
"FeatureMatching": "2.0",
|
||||
"DistortionCalibration": "2.0",
|
||||
"CameraInit": "7.0",
|
||||
"ImageMatching": "2.0",
|
||||
"FeatureExtraction": "1.1",
|
||||
"StructureFromMotion": "2.0"
|
||||
}
|
||||
},
|
||||
"graph": {
|
||||
"DistortionCalibration_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"input": "{CameraInit_1.output}",
|
||||
"lensGrid": []
|
||||
},
|
||||
"nodeType": "DistortionCalibration",
|
||||
"uids": {
|
||||
"0": "8afea9d171904cdb6ba1c0b116cb60de3ccb6fb4"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outSfMData": "{cache}/{nodeType}/{uid0}/sfmData.sfm"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
160
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatching_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"nbNeighbors": 10,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"maxDescriptors": 500,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"nbMatches": 5,
|
||||
"input": "{FeatureExtraction_1.input}",
|
||||
"method": "SequentialAndVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatching",
|
||||
"uids": {
|
||||
"0": "832b744de5fa804d7d63ea255419b1afaf24f723"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt"
|
||||
},
|
||||
"position": [
|
||||
400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"dspsift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "normal",
|
||||
"gridFiltering": true,
|
||||
"input": "{CameraInit_1.output}",
|
||||
"describerPreset": "normal"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "a07fb8d05b63327d05461954c2fd2a00f201275b"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"StructureFromMotion_1": {
|
||||
"inputs": {
|
||||
"localizerEstimatorMaxIterations": 4096,
|
||||
"minAngleForLandmark": 0.5,
|
||||
"filterTrackForks": false,
|
||||
"minNumberOfObservationsForTriangulation": 3,
|
||||
"maxAngleInitialPair": 40.0,
|
||||
"observationConstraint": "Scale",
|
||||
"maxNumberOfMatches": 0,
|
||||
"localizerEstimator": "acransac",
|
||||
"describerTypes": "{FeatureMatching_1.describerTypes}",
|
||||
"lockScenePreviouslyReconstructed": false,
|
||||
"localBAGraphDistance": 1,
|
||||
"minNbCamerasToRefinePrincipalPoint": 3,
|
||||
"lockAllIntrinsics": false,
|
||||
"input": "{FeatureMatching_1.input}",
|
||||
"featuresFolders": "{FeatureMatching_1.featuresFolders}",
|
||||
"useRigConstraint": true,
|
||||
"rigMinNbCamerasForCalibration": 20,
|
||||
"initialPairA": "",
|
||||
"initialPairB": "",
|
||||
"interFileExtension": ".abc",
|
||||
"useLocalBA": true,
|
||||
"computeStructureColor": true,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_1.output}"
|
||||
],
|
||||
"minInputTrackLength": 5,
|
||||
"useOnlyMatchesFromInputFolder": false,
|
||||
"verboseLevel": "info",
|
||||
"minAngleForTriangulation": 1.0,
|
||||
"maxReprojectionError": 4.0,
|
||||
"minAngleInitialPair": 5.0,
|
||||
"minNumberOfMatches": 0,
|
||||
"localizerEstimatorError": 0.0
|
||||
},
|
||||
"nodeType": "StructureFromMotion",
|
||||
"uids": {
|
||||
"0": "4d198974784fd71f5a1c189e10c2914e56523585"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/sfm.abc",
|
||||
"extraInfoFolder": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ExportAnimatedCamera_1": {
|
||||
"inputs": {
|
||||
"exportFullROD": false,
|
||||
"undistortedImageType": "exr",
|
||||
"exportUVMaps": true,
|
||||
"verboseLevel": "info",
|
||||
"sfmDataFilter": "",
|
||||
"exportUndistortedImages": false,
|
||||
"input": "{StructureFromMotion_1.output}",
|
||||
"viewFilter": "",
|
||||
"correctPrincipalPoint": true
|
||||
},
|
||||
"nodeType": "ExportAnimatedCamera",
|
||||
"uids": {
|
||||
"0": "31413f19e51b239874733f13f9628286fd185c18"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputUndistorted": "{cache}/{nodeType}/{uid0}/undistort",
|
||||
"outputCamera": "{cache}/{nodeType}/{uid0}/camera.abc"
|
||||
},
|
||||
"position": [
|
||||
1000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_1": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye4",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_1.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatching_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{DistortionCalibration_1.outSfMData}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "8386c096445d6988ea7d14f1ae3192978a4dd2e8"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
}
|
||||
}
|
||||
}
|
meshroom/pipelines/panoramaFisheyeHdr.mg (new file, 591 lines)

@@ -0,0 +1,591 @@
{
|
||||
"header": {
|
||||
"nodesVersions": {
|
||||
"PanoramaSeams": "2.0",
|
||||
"FeatureMatching": "2.0",
|
||||
"ImageProcessing": "3.0",
|
||||
"PanoramaCompositing": "2.0",
|
||||
"LdrToHdrMerge": "4.0",
|
||||
"LdrToHdrSampling": "4.0",
|
||||
"LdrToHdrCalibration": "3.0",
|
||||
"PanoramaEstimation": "1.0",
|
||||
"PanoramaInit": "2.0",
|
||||
"PanoramaMerging": "1.0",
|
||||
"SfMTransform": "3.0",
|
||||
"CameraInit": "7.0",
|
||||
"ImageMatching": "2.0",
|
||||
"FeatureExtraction": "1.1",
|
||||
"PanoramaPrepareImages": "1.1",
|
||||
"PanoramaWarping": "1.0"
|
||||
},
|
||||
"releaseVersion": "2021.1.0",
|
||||
"fileVersion": "1.1"
|
||||
},
|
||||
"graph": {
|
||||
"LdrToHdrMerge_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"fusionWeight": "gaussian",
|
||||
"channelQuantizationPower": "{LdrToHdrCalibration_1.channelQuantizationPower}",
|
||||
"nbBrackets": 0,
|
||||
"enableHighlight": false,
|
||||
"offsetRefBracketIndex": 1,
|
||||
"storageDataType": "float",
|
||||
"highlightTargetLux": 120000.0,
|
||||
"byPass": "{LdrToHdrCalibration_1.byPass}",
|
||||
"highlightCorrectionFactor": 1.0,
|
||||
"input": "{LdrToHdrCalibration_1.input}",
|
||||
"userNbBrackets": "{LdrToHdrCalibration_1.userNbBrackets}",
|
||||
"response": "{LdrToHdrCalibration_1.response}"
|
||||
},
|
||||
"nodeType": "LdrToHdrMerge",
|
||||
"uids": {
|
||||
"0": "9b90e3b468adc487fe2905e0cc78328216966317"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 2,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outSfMData": "{cache}/{nodeType}/{uid0}/sfmData.sfm"
|
||||
},
|
||||
"position": [
|
||||
800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageProcessing_1": {
|
||||
"inputs": {
|
||||
"outputFormat": "rgba",
|
||||
"sharpenFilter": {
|
||||
"threshold": 0.0,
|
||||
"width": 3,
|
||||
"sharpenFilterEnabled": false,
|
||||
"contrast": 1.0
|
||||
},
|
||||
"extension": "exr",
|
||||
"exposureCompensation": false,
|
||||
"storageDataType": "float",
|
||||
"inputFolders": [],
|
||||
"verboseLevel": "info",
|
||||
"metadataFolders": [],
|
||||
"claheFilter": {
|
||||
"claheClipLimit": 4.0,
|
||||
"claheTileGridSize": 8,
|
||||
"claheEnabled": false
|
||||
},
|
||||
"medianFilter": 0,
|
||||
"fillHoles": true,
|
||||
"reconstructedViewsOnly": false,
|
||||
"input": "{PanoramaMerging_1.outputPanorama}",
|
||||
"noiseFilter": {
|
||||
"noiseEnabled": false,
|
||||
"noiseMethod": "uniform",
|
||||
"noiseB": 1.0,
|
||||
"noiseMono": true,
|
||||
"noiseA": 0.0
|
||||
},
|
||||
"scaleFactor": 1.0,
|
||||
"bilateralFilter": {
|
||||
"bilateralFilterDistance": 0,
|
||||
"bilateralFilterSigmaColor": 0.0,
|
||||
"bilateralFilterSigmaSpace": 0.0,
|
||||
"bilateralFilterEnabled": false
|
||||
},
|
||||
"contrast": 1.0,
|
||||
"fixNonFinite": true
|
||||
},
|
||||
"nodeType": "ImageProcessing",
|
||||
"uids": {
|
||||
"0": "494b97af203ddbe4767c922a6c5795297cf53eef"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputImages": "{cache}/{nodeType}/{uid0}/panorama.exr",
|
||||
"outSfMData": ""
|
||||
},
|
||||
"position": [
|
||||
3000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaWarping_1": {
|
||||
"inputs": {
|
||||
"panoramaWidth": 10000,
|
||||
"maxPanoramaWidth": 70000,
|
||||
"verboseLevel": "info",
|
||||
"percentUpscale": 50,
|
||||
"input": "{SfMTransform_1.output}",
|
||||
"storageDataType": "float",
|
||||
"estimateResolution": true
|
||||
},
|
||||
"nodeType": "PanoramaWarping",
|
||||
"uids": {
|
||||
"0": "45cca14aba2a8c4f68c79a15d3fbc48f30ae9d66"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 5,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
2200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"LdrToHdrCalibration_1": {
|
||||
"inputs": {
|
||||
"samples": "{LdrToHdrSampling_1.output}",
|
||||
"channelQuantizationPower": "{LdrToHdrSampling_1.channelQuantizationPower}",
|
||||
"maxTotalPoints": 1000000,
|
||||
"nbBrackets": 0,
|
||||
"calibrationMethod": "debevec",
|
||||
"calibrationWeight": "default",
|
||||
"verboseLevel": "info",
|
||||
"byPass": "{LdrToHdrSampling_1.byPass}",
|
||||
"input": "{LdrToHdrSampling_1.input}",
|
||||
"userNbBrackets": "{LdrToHdrSampling_1.userNbBrackets}"
|
||||
},
|
||||
"nodeType": "LdrToHdrCalibration",
|
||||
"uids": {
|
||||
"0": "9225abd943d28be4387a8a8902711d0b7c604a2a"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"response": "{cache}/{nodeType}/{uid0}/response.csv"
|
||||
},
|
||||
"position": [
|
||||
600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"LdrToHdrSampling_1": {
|
||||
"inputs": {
|
||||
"blockSize": 256,
|
||||
"nbBrackets": 0,
|
||||
"verboseLevel": "info",
|
||||
"radius": 5,
|
||||
"byPass": false,
|
||||
"channelQuantizationPower": 10,
|
||||
"debug": false,
|
||||
"input": "{PanoramaPrepareImages_1.output}",
|
||||
"maxCountSample": 200,
|
||||
"userNbBrackets": 0
|
||||
},
|
||||
"nodeType": "LdrToHdrSampling",
|
||||
"uids": {
|
||||
"0": "af67674ecc8524592fe2b217259c241167e28dcd"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 2,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatching_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"nbNeighbors": 5,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"maxDescriptors": 500,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"nbMatches": 40,
|
||||
"input": "{PanoramaInit_1.outSfMData}",
|
||||
"method": "FrustumOrVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatching",
|
||||
"uids": {
|
||||
"0": "a076f9e959d62b3a6f63d3f6493527b857eab8d6"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt"
|
||||
},
|
||||
"position": [
|
||||
1400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"sift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "high",
|
||||
"gridFiltering": true,
|
||||
"input": "{LdrToHdrMerge_1.outSfMData}",
|
||||
"describerPreset": "high"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "04f8824c2e2f206b47f05edaf76def15fa91446b"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaSeams_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"input": "{PanoramaWarping_1.input}",
|
||||
"warpingFolder": "{PanoramaWarping_1.output}",
|
||||
"maxWidth": 5000,
|
||||
"useGraphCut": true
|
||||
},
|
||||
"nodeType": "PanoramaSeams",
|
||||
"uids": {
|
||||
"0": "dd02562c5c3b1e18e42561d99590cbf4ff5ba35a"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/labels.exr"
|
||||
},
|
||||
"position": [
|
||||
2400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaCompositing_1": {
|
||||
"inputs": {
|
||||
"warpingFolder": "{PanoramaSeams_1.warpingFolder}",
|
||||
"maxThreads": 4,
|
||||
"labels": "{PanoramaSeams_1.output}",
|
||||
"verboseLevel": "info",
|
||||
"overlayType": "none",
|
||||
"compositerType": "multiband",
|
||||
"input": "{PanoramaSeams_1.input}",
|
||||
"storageDataType": "float"
|
||||
},
|
||||
"nodeType": "PanoramaCompositing",
|
||||
"uids": {
|
||||
"0": "1f1e629021e2280291046226e009a52dbb7809c1"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 5,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
2600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_1": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaPrepareImages_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"input": "{CameraInit_1.output}"
|
||||
},
|
||||
"nodeType": "PanoramaPrepareImages",
|
||||
"uids": {
|
||||
"0": "6956c52a8d18cb4cdb7ceb0db68f4deb84a37aee"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"SfMTransform_1": {
|
||||
"inputs": {
|
||||
"applyScale": true,
|
||||
"scale": 1.0,
|
||||
"applyTranslation": true,
|
||||
"landmarksDescriberTypes": [
|
||||
"sift",
|
||||
"dspsift",
|
||||
"akaze"
|
||||
],
|
||||
"markers": [],
|
||||
"method": "manual",
|
||||
"verboseLevel": "info",
|
||||
"input": "{PanoramaEstimation_1.output}",
|
||||
"applyRotation": true,
|
||||
"manualTransform": {
|
||||
"manualTranslation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
},
|
||||
"manualRotation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
},
|
||||
"manualScale": 1.0
|
||||
},
|
||||
"transformation": ""
|
||||
},
|
||||
"nodeType": "SfMTransform",
|
||||
"uids": {
|
||||
"0": "b8568fb40b68b42ac80c18df2dcdf600744fe3e1"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/panorama.abc",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
2000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaMerging_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"compositingFolder": "{PanoramaCompositing_1.output}",
|
||||
"outputFileType": "exr",
|
||||
"storageDataType": "float",
|
||||
"input": "{PanoramaCompositing_1.input}"
|
||||
},
|
||||
"nodeType": "PanoramaMerging",
|
||||
"uids": {
|
||||
"0": "70edd7fe8194bf35dcb0b221141cd4abd2354547"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outputPanorama": "{cache}/{nodeType}/{uid0}/panorama.{outputFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
2800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaEstimation_1": {
|
||||
"inputs": {
|
||||
"intermediateRefineWithFocalDist": false,
|
||||
"offsetLongitude": 0.0,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_1.output}"
|
||||
],
|
||||
"filterMatches": false,
|
||||
"rotationAveragingWeighting": true,
|
||||
"offsetLatitude": 0.0,
|
||||
"verboseLevel": "info",
|
||||
"maxAngularError": 100.0,
|
||||
"lockAllIntrinsics": false,
|
||||
"refine": true,
|
||||
"input": "{FeatureMatching_1.input}",
|
||||
"intermediateRefineWithFocal": false,
|
||||
"describerTypes": "{FeatureMatching_1.describerTypes}",
|
||||
"relativeRotation": "rotation_matrix",
|
||||
"maxAngleToPrior": 20.0,
|
||||
"rotationAveraging": "L2_minimization",
|
||||
"featuresFolders": "{FeatureMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "PanoramaEstimation",
|
||||
"uids": {
|
||||
"0": "47b0976fc98eefcbc0342bbb63e7d27ef3e0d4de"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/panorama.abc",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
1800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaInit_1": {
|
||||
"inputs": {
|
||||
"useFisheye": true,
|
||||
"fisheyeCenterOffset": {
|
||||
"fisheyeCenterOffset_y": 0.0,
|
||||
"fisheyeCenterOffset_x": 0.0
|
||||
},
|
||||
"initializeCameras": "No",
|
||||
"nbViewsPerLine": [],
|
||||
"debugFisheyeCircleEstimation": false,
|
||||
"verboseLevel": "info",
|
||||
"dependency": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
],
|
||||
"estimateFisheyeCircle": true,
|
||||
"input": "{FeatureExtraction_1.input}",
|
||||
"yawCW": 1,
|
||||
"config": "",
|
||||
"fisheyeRadius": 96.0,
|
||||
"inputAngle": "None"
|
||||
},
|
||||
"nodeType": "PanoramaInit",
|
||||
"uids": {
|
||||
"0": "2fd95a957eb42ffc8fb1c24d2666afcd859ba079"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outSfMData": "{cache}/{nodeType}/{uid0}/sfmData.sfm"
|
||||
},
|
||||
"position": [
|
||||
1200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_1.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatching_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{ImageMatching_1.input}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "c0fbe0b12fe47ada6a1ca8f74d266e99c1cc548c"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
}
|
||||
}
|
||||
}
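The pipeline templates added in this commit are plain JSON, so they can be inspected without launching Meshroom. Below is a minimal sketch, assuming only the structure visible in these files (a "header" with "nodesVersions" and a "graph" mapping node names to their "nodeType", "inputs" and "outputs") and an assumed template path:

import json

# Hypothetical path; any of the .mg templates added in this commit would work the same way.
with open("meshroom/pipelines/panoramaHdr.mg") as f:
    template = json.load(f)

header = template["header"]
print("file version:", header["fileVersion"], "- release:", header["releaseVersion"])
for name, node in template["graph"].items():
    # Each node declares its type; the header records the expected version per node type.
    print(name, "->", node["nodeType"], header["nodesVersions"].get(node["nodeType"], "?"))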
591  meshroom/pipelines/panoramaHdr.mg  Normal file
@@ -0,0 +1,591 @@
{
|
||||
"header": {
|
||||
"nodesVersions": {
|
||||
"PanoramaSeams": "2.0",
|
||||
"FeatureMatching": "2.0",
|
||||
"ImageProcessing": "3.0",
|
||||
"PanoramaCompositing": "2.0",
|
||||
"LdrToHdrMerge": "4.0",
|
||||
"LdrToHdrSampling": "4.0",
|
||||
"LdrToHdrCalibration": "3.0",
|
||||
"PanoramaEstimation": "1.0",
|
||||
"PanoramaInit": "2.0",
|
||||
"PanoramaMerging": "1.0",
|
||||
"SfMTransform": "3.0",
|
||||
"CameraInit": "7.0",
|
||||
"ImageMatching": "2.0",
|
||||
"FeatureExtraction": "1.1",
|
||||
"PanoramaPrepareImages": "1.1",
|
||||
"PanoramaWarping": "1.0"
|
||||
},
|
||||
"releaseVersion": "2021.1.0",
|
||||
"fileVersion": "1.1"
|
||||
},
|
||||
"graph": {
|
||||
"LdrToHdrMerge_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"fusionWeight": "gaussian",
|
||||
"channelQuantizationPower": "{LdrToHdrCalibration_1.channelQuantizationPower}",
|
||||
"nbBrackets": 0,
|
||||
"enableHighlight": false,
|
||||
"offsetRefBracketIndex": 1,
|
||||
"storageDataType": "float",
|
||||
"highlightTargetLux": 120000.0,
|
||||
"byPass": "{LdrToHdrCalibration_1.byPass}",
|
||||
"highlightCorrectionFactor": 1.0,
|
||||
"input": "{LdrToHdrCalibration_1.input}",
|
||||
"userNbBrackets": "{LdrToHdrCalibration_1.userNbBrackets}",
|
||||
"response": "{LdrToHdrCalibration_1.response}"
|
||||
},
|
||||
"nodeType": "LdrToHdrMerge",
|
||||
"uids": {
|
||||
"0": "9b90e3b468adc487fe2905e0cc78328216966317"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 2,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outSfMData": "{cache}/{nodeType}/{uid0}/sfmData.sfm"
|
||||
},
|
||||
"position": [
|
||||
800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageProcessing_1": {
|
||||
"inputs": {
|
||||
"outputFormat": "rgba",
|
||||
"sharpenFilter": {
|
||||
"threshold": 0.0,
|
||||
"width": 3,
|
||||
"sharpenFilterEnabled": false,
|
||||
"contrast": 1.0
|
||||
},
|
||||
"extension": "exr",
|
||||
"exposureCompensation": false,
|
||||
"storageDataType": "float",
|
||||
"inputFolders": [],
|
||||
"verboseLevel": "info",
|
||||
"metadataFolders": [],
|
||||
"claheFilter": {
|
||||
"claheClipLimit": 4.0,
|
||||
"claheTileGridSize": 8,
|
||||
"claheEnabled": false
|
||||
},
|
||||
"medianFilter": 0,
|
||||
"fillHoles": true,
|
||||
"reconstructedViewsOnly": false,
|
||||
"input": "{PanoramaMerging_1.outputPanorama}",
|
||||
"noiseFilter": {
|
||||
"noiseEnabled": false,
|
||||
"noiseMethod": "uniform",
|
||||
"noiseB": 1.0,
|
||||
"noiseMono": true,
|
||||
"noiseA": 0.0
|
||||
},
|
||||
"scaleFactor": 1.0,
|
||||
"bilateralFilter": {
|
||||
"bilateralFilterDistance": 0,
|
||||
"bilateralFilterSigmaColor": 0.0,
|
||||
"bilateralFilterSigmaSpace": 0.0,
|
||||
"bilateralFilterEnabled": false
|
||||
},
|
||||
"contrast": 1.0,
|
||||
"fixNonFinite": true
|
||||
},
|
||||
"nodeType": "ImageProcessing",
|
||||
"uids": {
|
||||
"0": "d7845b276d97c3489223cce16a1e9d581d98a832"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputImages": "{cache}/{nodeType}/{uid0}/panorama.exr",
|
||||
"outSfMData": ""
|
||||
},
|
||||
"position": [
|
||||
3000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaWarping_1": {
|
||||
"inputs": {
|
||||
"panoramaWidth": 10000,
|
||||
"maxPanoramaWidth": 70000,
|
||||
"verboseLevel": "info",
|
||||
"percentUpscale": 50,
|
||||
"input": "{SfMTransform_1.output}",
|
||||
"storageDataType": "float",
|
||||
"estimateResolution": true
|
||||
},
|
||||
"nodeType": "PanoramaWarping",
|
||||
"uids": {
|
||||
"0": "f2971d0c73b15fa99cbccbc9515de346ca141a1e"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 5,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
2200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"LdrToHdrCalibration_1": {
|
||||
"inputs": {
|
||||
"samples": "{LdrToHdrSampling_1.output}",
|
||||
"channelQuantizationPower": "{LdrToHdrSampling_1.channelQuantizationPower}",
|
||||
"maxTotalPoints": 1000000,
|
||||
"nbBrackets": 0,
|
||||
"calibrationMethod": "debevec",
|
||||
"calibrationWeight": "default",
|
||||
"verboseLevel": "info",
|
||||
"byPass": "{LdrToHdrSampling_1.byPass}",
|
||||
"input": "{LdrToHdrSampling_1.input}",
|
||||
"userNbBrackets": "{LdrToHdrSampling_1.userNbBrackets}"
|
||||
},
|
||||
"nodeType": "LdrToHdrCalibration",
|
||||
"uids": {
|
||||
"0": "9225abd943d28be4387a8a8902711d0b7c604a2a"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"response": "{cache}/{nodeType}/{uid0}/response.csv"
|
||||
},
|
||||
"position": [
|
||||
600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"LdrToHdrSampling_1": {
|
||||
"inputs": {
|
||||
"blockSize": 256,
|
||||
"nbBrackets": 0,
|
||||
"verboseLevel": "info",
|
||||
"radius": 5,
|
||||
"byPass": false,
|
||||
"channelQuantizationPower": 10,
|
||||
"debug": false,
|
||||
"input": "{PanoramaPrepareImages_1.output}",
|
||||
"maxCountSample": 200,
|
||||
"userNbBrackets": 0
|
||||
},
|
||||
"nodeType": "LdrToHdrSampling",
|
||||
"uids": {
|
||||
"0": "af67674ecc8524592fe2b217259c241167e28dcd"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 2,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatching_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"nbNeighbors": 5,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"maxDescriptors": 500,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"nbMatches": 40,
|
||||
"input": "{PanoramaInit_1.outSfMData}",
|
||||
"method": "FrustumOrVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatching",
|
||||
"uids": {
|
||||
"0": "7efc9cd43585003fc6eec0776a704e358f0a15de"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt"
|
||||
},
|
||||
"position": [
|
||||
1400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"dspsift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "high",
|
||||
"gridFiltering": true,
|
||||
"input": "{LdrToHdrMerge_1.outSfMData}",
|
||||
"describerPreset": "normal"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "1863cc0989ab0fd910d4fe293074ff94c4e586a1"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaSeams_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"input": "{PanoramaWarping_1.input}",
|
||||
"warpingFolder": "{PanoramaWarping_1.output}",
|
||||
"maxWidth": 5000,
|
||||
"useGraphCut": true
|
||||
},
|
||||
"nodeType": "PanoramaSeams",
|
||||
"uids": {
|
||||
"0": "0ee6da171bd684358b7c64dcc631f81ba743e1fa"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/labels.exr"
|
||||
},
|
||||
"position": [
|
||||
2400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaCompositing_1": {
|
||||
"inputs": {
|
||||
"warpingFolder": "{PanoramaSeams_1.warpingFolder}",
|
||||
"maxThreads": 4,
|
||||
"labels": "{PanoramaSeams_1.output}",
|
||||
"verboseLevel": "info",
|
||||
"overlayType": "none",
|
||||
"compositerType": "multiband",
|
||||
"input": "{PanoramaSeams_1.input}",
|
||||
"storageDataType": "float"
|
||||
},
|
||||
"nodeType": "PanoramaCompositing",
|
||||
"uids": {
|
||||
"0": "8aba78572808d012e0bb376503c2016df943b3f0"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 5,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
2600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_1": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaPrepareImages_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"input": "{CameraInit_1.output}"
|
||||
},
|
||||
"nodeType": "PanoramaPrepareImages",
|
||||
"uids": {
|
||||
"0": "6956c52a8d18cb4cdb7ceb0db68f4deb84a37aee"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"SfMTransform_1": {
|
||||
"inputs": {
|
||||
"applyScale": true,
|
||||
"scale": 1.0,
|
||||
"applyTranslation": true,
|
||||
"landmarksDescriberTypes": [
|
||||
"sift",
|
||||
"dspsift",
|
||||
"akaze"
|
||||
],
|
||||
"markers": [],
|
||||
"method": "manual",
|
||||
"verboseLevel": "info",
|
||||
"input": "{PanoramaEstimation_1.output}",
|
||||
"applyRotation": true,
|
||||
"manualTransform": {
|
||||
"manualTranslation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
},
|
||||
"manualRotation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
},
|
||||
"manualScale": 1.0
|
||||
},
|
||||
"transformation": ""
|
||||
},
|
||||
"nodeType": "SfMTransform",
|
||||
"uids": {
|
||||
"0": "c72641a2cca50759bcf5283ae6e0b6f7abc3fe4a"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/panorama.abc",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
2000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaMerging_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"compositingFolder": "{PanoramaCompositing_1.output}",
|
||||
"outputFileType": "exr",
|
||||
"storageDataType": "float",
|
||||
"input": "{PanoramaCompositing_1.input}"
|
||||
},
|
||||
"nodeType": "PanoramaMerging",
|
||||
"uids": {
|
||||
"0": "e007a4eb5fc5937b320638eba667cea183c0c642"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outputPanorama": "{cache}/{nodeType}/{uid0}/panorama.{outputFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
2800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaEstimation_1": {
|
||||
"inputs": {
|
||||
"intermediateRefineWithFocalDist": false,
|
||||
"offsetLongitude": 0.0,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_1.output}"
|
||||
],
|
||||
"filterMatches": false,
|
||||
"rotationAveragingWeighting": true,
|
||||
"offsetLatitude": 0.0,
|
||||
"verboseLevel": "info",
|
||||
"maxAngularError": 100.0,
|
||||
"lockAllIntrinsics": false,
|
||||
"refine": true,
|
||||
"input": "{FeatureMatching_1.input}",
|
||||
"intermediateRefineWithFocal": false,
|
||||
"describerTypes": "{FeatureMatching_1.describerTypes}",
|
||||
"relativeRotation": "rotation_matrix",
|
||||
"maxAngleToPrior": 20.0,
|
||||
"rotationAveraging": "L2_minimization",
|
||||
"featuresFolders": "{FeatureMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "PanoramaEstimation",
|
||||
"uids": {
|
||||
"0": "de946a7c1080873d15c9eb8a0523b544cf548719"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/panorama.abc",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
1800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PanoramaInit_1": {
|
||||
"inputs": {
|
||||
"useFisheye": false,
|
||||
"fisheyeCenterOffset": {
|
||||
"fisheyeCenterOffset_y": 0.0,
|
||||
"fisheyeCenterOffset_x": 0.0
|
||||
},
|
||||
"initializeCameras": "No",
|
||||
"nbViewsPerLine": [],
|
||||
"debugFisheyeCircleEstimation": false,
|
||||
"verboseLevel": "info",
|
||||
"dependency": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
],
|
||||
"estimateFisheyeCircle": true,
|
||||
"input": "{FeatureExtraction_1.input}",
|
||||
"yawCW": 1,
|
||||
"config": "",
|
||||
"fisheyeRadius": 96.0,
|
||||
"inputAngle": "None"
|
||||
},
|
||||
"nodeType": "PanoramaInit",
|
||||
"uids": {
|
||||
"0": "702d6b973342e9203b50afea1470b4c01eb90174"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outSfMData": "{cache}/{nodeType}/{uid0}/sfmData.sfm"
|
||||
},
|
||||
"position": [
|
||||
1200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_1.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatching_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{ImageMatching_1.input}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "cec6da6e894230ab66683c2e959bc9581ea5430e"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
}
|
||||
}
|
||||
}
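Within a graph, node inputs reference upstream results through "{NodeName.attribute}" placeholders (for example "{CameraInit_1.output}" above); that is how the edges of the pipeline are encoded. A rough standard-library sketch of recovering those edges, with the template path again an assumption:

import json
import re

with open("meshroom/pipelines/panoramaHdr.mg") as f:  # assumed path
    graph = json.load(f)["graph"]

link = re.compile(r"\{(\w+)\.\w+\}")
edges = set()
for name, node in graph.items():
    for value in node["inputs"].values():
        # Inputs may be plain values, "{Node.attr}" strings, or lists of them;
        # nested groups (dict values) are ignored in this sketch.
        for v in (value if isinstance(value, list) else [value]):
            if isinstance(v, str):
                edges.update((upstream, name) for upstream in link.findall(v) if upstream in graph)

for src, dst in sorted(edges):
    print(src, "->", dst)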
523  meshroom/pipelines/photogrammetry.mg  Normal file
@@ -0,0 +1,523 @@
{
|
||||
"header": {
|
||||
"pipelineVersion": "2.2",
|
||||
"releaseVersion": "2021.1.0",
|
||||
"fileVersion": "1.1",
|
||||
"nodesVersions": {
|
||||
"FeatureMatching": "2.0",
|
||||
"MeshFiltering": "3.0",
|
||||
"Texturing": "6.0",
|
||||
"PrepareDenseScene": "3.0",
|
||||
"DepthMap": "2.0",
|
||||
"Meshing": "7.0",
|
||||
"CameraInit": "7.0",
|
||||
"ImageMatching": "2.0",
|
||||
"FeatureExtraction": "1.1",
|
||||
"StructureFromMotion": "2.0",
|
||||
"DepthMapFilter": "3.0"
|
||||
}
|
||||
},
|
||||
"graph": {
|
||||
"Texturing_1": {
|
||||
"inputs": {
|
||||
"imagesFolder": "{DepthMap_1.imagesFolder}",
|
||||
"downscale": 2,
|
||||
"bumpMapping": {
|
||||
"normalFileType": "exr",
|
||||
"enable": true,
|
||||
"bumpType": "Normal",
|
||||
"heightFileType": "exr"
|
||||
},
|
||||
"forceVisibleByAllVertices": false,
|
||||
"fillHoles": false,
|
||||
"multiBandDownscale": 4,
|
||||
"useScore": true,
|
||||
"displacementMapping": {
|
||||
"displacementMappingFileType": "exr",
|
||||
"enable": true
|
||||
},
|
||||
"outputMeshFileType": "obj",
|
||||
"angleHardThreshold": 90.0,
|
||||
"textureSide": 8192,
|
||||
"processColorspace": "sRGB",
|
||||
"input": "{Meshing_1.output}",
|
||||
"useUDIM": true,
|
||||
"subdivisionTargetRatio": 0.8,
|
||||
"padding": 5,
|
||||
"inputRefMesh": "",
|
||||
"correctEV": false,
|
||||
"visibilityRemappingMethod": "PullPush",
|
||||
"inputMesh": "{MeshFiltering_1.outputMesh}",
|
||||
"verboseLevel": "info",
|
||||
"colorMapping": {
|
||||
"enable": true,
|
||||
"colorMappingFileType": "exr"
|
||||
},
|
||||
"bestScoreThreshold": 0.1,
|
||||
"unwrapMethod": "Basic",
|
||||
"multiBandNbContrib": {
|
||||
"high": 1,
|
||||
"midHigh": 5,
|
||||
"low": 0,
|
||||
"midLow": 10
|
||||
},
|
||||
"flipNormals": false
|
||||
},
|
||||
"nodeType": "Texturing",
|
||||
"uids": {
|
||||
"0": "09f72f6745c6b13aae56fc3876e6541fbeaa557d"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"outputTextures": "{cache}/{nodeType}/{uid0}/texture_*.exr",
|
||||
"outputMesh": "{cache}/{nodeType}/{uid0}/texturedMesh.{outputMeshFileTypeValue}",
|
||||
"outputMaterial": "{cache}/{nodeType}/{uid0}/texturedMesh.mtl",
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
2000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"Meshing_1": {
|
||||
"inputs": {
|
||||
"exportDebugTetrahedralization": false,
|
||||
"useBoundingBox": false,
|
||||
"maxInputPoints": 50000000,
|
||||
"repartition": "multiResolution",
|
||||
"helperPointsGridSize": 10,
|
||||
"seed": 0,
|
||||
"voteFilteringForWeaklySupportedSurfaces": true,
|
||||
"verboseLevel": "info",
|
||||
"outputMeshFileType": "obj",
|
||||
"simGaussianSizeInit": 10.0,
|
||||
"nPixelSizeBehind": 4.0,
|
||||
"fullWeight": 1.0,
|
||||
"depthMapsFolder": "{DepthMapFilter_1.output}",
|
||||
"densify": false,
|
||||
"simFactor": 15.0,
|
||||
"maskHelperPointsWeight": 1.0,
|
||||
"densifyScale": 20.0,
|
||||
"input": "{DepthMapFilter_1.input}",
|
||||
"addLandmarksToTheDensePointCloud": false,
|
||||
"voteMarginFactor": 4.0,
|
||||
"saveRawDensePointCloud": false,
|
||||
"contributeMarginFactor": 2.0,
|
||||
"estimateSpaceMinObservationAngle": 10,
|
||||
"nbSolidAngleFilteringIterations": 2,
|
||||
"minStep": 2,
|
||||
"colorizeOutput": false,
|
||||
"pixSizeMarginFinalCoef": 4.0,
|
||||
"densifyNbFront": 1,
|
||||
"boundingBox": {
|
||||
"bboxScale": {
|
||||
"y": 1.0,
|
||||
"x": 1.0,
|
||||
"z": 1.0
|
||||
},
|
||||
"bboxTranslation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
},
|
||||
"bboxRotation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
}
|
||||
},
|
||||
"minSolidAngleRatio": 0.2,
|
||||
"maxPoints": 5000000,
|
||||
"addMaskHelperPoints": false,
|
||||
"maxPointsPerVoxel": 1000000,
|
||||
"angleFactor": 15.0,
|
||||
"partitioning": "singleBlock",
|
||||
"estimateSpaceFromSfM": true,
|
||||
"minAngleThreshold": 1.0,
|
||||
"pixSizeMarginInitCoef": 2.0,
|
||||
"refineFuse": true,
|
||||
"maxNbConnectedHelperPoints": 50,
|
||||
"estimateSpaceMinObservations": 3,
|
||||
"invertTetrahedronBasedOnNeighborsNbIterations": 10,
|
||||
"maskBorderSize": 4,
|
||||
"simGaussianSize": 10.0,
|
||||
"densifyNbBack": 1
|
||||
},
|
||||
"nodeType": "Meshing",
|
||||
"uids": {
|
||||
"0": "aeb66fceaacd37ecd5bae8364bd9e87ccff2a84c"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/densePointCloud.abc",
|
||||
"outputMesh": "{cache}/{nodeType}/{uid0}/mesh.{outputMeshFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
1600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"DepthMapFilter_1": {
|
||||
"inputs": {
|
||||
"minNumOfConsistentCamsWithLowSimilarity": 4,
|
||||
"computeNormalMaps": false,
|
||||
"minNumOfConsistentCams": 3,
|
||||
"depthMapsFolder": "{DepthMap_1.output}",
|
||||
"verboseLevel": "info",
|
||||
"nNearestCams": 10,
|
||||
"pixSizeBallWithLowSimilarity": 0,
|
||||
"pixToleranceFactor": 2.0,
|
||||
"pixSizeBall": 0,
|
||||
"minViewAngle": 2.0,
|
||||
"maxViewAngle": 70.0,
|
||||
"input": "{DepthMap_1.input}"
|
||||
},
|
||||
"nodeType": "DepthMapFilter",
|
||||
"uids": {
|
||||
"0": "4de4649a857d7bd4f7fdfb27470a5087625ff8c9"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 10,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatching_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"nbNeighbors": 5,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"maxDescriptors": 500,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"nbMatches": 40,
|
||||
"input": "{FeatureExtraction_1.input}",
|
||||
"method": "SequentialAndVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatching",
|
||||
"uids": {
|
||||
"0": "46fb9072ac753d60bec7dda9c8674b0568506ddf"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt"
|
||||
},
|
||||
"position": [
|
||||
400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"dspsift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "normal",
|
||||
"gridFiltering": true,
|
||||
"input": "{CameraInit_1.output}",
|
||||
"describerPreset": "normal"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "a07fb8d05b63327d05461954c2fd2a00f201275b"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"StructureFromMotion_1": {
|
||||
"inputs": {
|
||||
"localizerEstimatorMaxIterations": 4096,
|
||||
"minAngleForLandmark": 2.0,
|
||||
"filterTrackForks": false,
|
||||
"minNumberOfObservationsForTriangulation": 2,
|
||||
"maxAngleInitialPair": 40.0,
|
||||
"observationConstraint": "Scale",
|
||||
"maxNumberOfMatches": 0,
|
||||
"localizerEstimator": "acransac",
|
||||
"describerTypes": "{FeatureMatching_1.describerTypes}",
|
||||
"lockScenePreviouslyReconstructed": false,
|
||||
"localBAGraphDistance": 1,
|
||||
"minNbCamerasToRefinePrincipalPoint": 3,
|
||||
"lockAllIntrinsics": false,
|
||||
"input": "{FeatureMatching_1.input}",
|
||||
"featuresFolders": "{FeatureMatching_1.featuresFolders}",
|
||||
"useRigConstraint": true,
|
||||
"rigMinNbCamerasForCalibration": 20,
|
||||
"initialPairA": "",
|
||||
"initialPairB": "",
|
||||
"interFileExtension": ".abc",
|
||||
"useLocalBA": true,
|
||||
"computeStructureColor": true,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_1.output}"
|
||||
],
|
||||
"minInputTrackLength": 2,
|
||||
"useOnlyMatchesFromInputFolder": false,
|
||||
"verboseLevel": "info",
|
||||
"minAngleForTriangulation": 3.0,
|
||||
"maxReprojectionError": 4.0,
|
||||
"minAngleInitialPair": 5.0,
|
||||
"minNumberOfMatches": 0,
|
||||
"localizerEstimatorError": 0.0
|
||||
},
|
||||
"nodeType": "StructureFromMotion",
|
||||
"uids": {
|
||||
"0": "89c3db0849ba07dfac5e97ca9e27dd690dc476ce"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/sfm.abc",
|
||||
"extraInfoFolder": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"PrepareDenseScene_1": {
|
||||
"inputs": {
|
||||
"imagesFolders": [],
|
||||
"masksFolders": [],
|
||||
"outputFileType": "exr",
|
||||
"verboseLevel": "info",
|
||||
"saveMatricesTxtFiles": false,
|
||||
"saveMetadata": true,
|
||||
"input": "{StructureFromMotion_1.output}",
|
||||
"evCorrection": false
|
||||
},
|
||||
"nodeType": "PrepareDenseScene",
|
||||
"uids": {
|
||||
"0": "894725f62ffeead1307d9d91852b07d7c8453625"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputUndistorted": "{cache}/{nodeType}/{uid0}/*.{outputFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
1000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_1": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye4",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"DepthMap_1": {
|
||||
"inputs": {
|
||||
"sgmMaxDepthsPerTc": 1500,
|
||||
"sgmP2": 100.0,
|
||||
"imagesFolder": "{PrepareDenseScene_1.output}",
|
||||
"downscale": 2,
|
||||
"refineMaxTCams": 6,
|
||||
"exportIntermediateResults": false,
|
||||
"nbGPUs": 0,
|
||||
"refineNiters": 100,
|
||||
"refineGammaP": 8.0,
|
||||
"refineGammaC": 15.5,
|
||||
"sgmMaxDepths": 3000,
|
||||
"sgmUseSfmSeeds": true,
|
||||
"input": "{PrepareDenseScene_1.input}",
|
||||
"refineWSH": 3,
|
||||
"sgmP1": 10.0,
|
||||
"sgmFilteringAxes": "YX",
|
||||
"sgmMaxTCams": 10,
|
||||
"refineSigma": 15,
|
||||
"sgmScale": -1,
|
||||
"minViewAngle": 2.0,
|
||||
"maxViewAngle": 70.0,
|
||||
"sgmGammaC": 5.5,
|
||||
"sgmWSH": 4,
|
||||
"refineNSamplesHalf": 150,
|
||||
"sgmMaxSideXY": 700,
|
||||
"refineUseTcOrRcPixSize": false,
|
||||
"verboseLevel": "info",
|
||||
"sgmGammaP": 8.0,
|
||||
"sgmStepXY": -1,
|
||||
"refineNDepthsToRefine": 31,
|
||||
"sgmStepZ": -1
|
||||
},
|
||||
"nodeType": "DepthMap",
|
||||
"uids": {
|
||||
"0": "f5ef2fd13dad8f48fcb87e2364e1e821a9db7d2d"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 3,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"MeshFiltering_1": {
|
||||
"inputs": {
|
||||
"filteringSubset": "all",
|
||||
"outputMeshFileType": "obj",
|
||||
"inputMesh": "{Meshing_1.outputMesh}",
|
||||
"filterTrianglesRatio": 0.0,
|
||||
"smoothingSubset": "all",
|
||||
"verboseLevel": "info",
|
||||
"smoothingIterations": 5,
|
||||
"filterLargeTrianglesFactor": 60.0,
|
||||
"keepLargestMeshOnly": false,
|
||||
"smoothingBoundariesNeighbours": 0,
|
||||
"smoothingLambda": 1.0,
|
||||
"filteringIterations": 1
|
||||
},
|
||||
"nodeType": "MeshFiltering",
|
||||
"uids": {
|
||||
"0": "febb162c4fbce195f6d312bbb80697720a2f52b9"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"outputMesh": "{cache}/{nodeType}/{uid0}/mesh.{outputMeshFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
1800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_1.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatching_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{ImageMatching_1.input}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "3b1f2c3fcfe0b94c65627c397a2671ba7594827d"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
}
|
||||
}
|
||||
}
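Output attributes in these templates are path patterns built from placeholders such as {cache}, {nodeType} and {uid0} (the per-node uid stored under "uids"). A small sketch that expands them for display, under the assumption of an arbitrary cache root:

import json

CACHE_ROOT = "/tmp/MeshroomCache"  # arbitrary assumed cache root, purely for illustration

with open("meshroom/pipelines/photogrammetry.mg") as f:  # assumed path
    graph = json.load(f)["graph"]

for name, node in graph.items():
    uid0 = node["uids"]["0"]
    for attr, pattern in node["outputs"].items():
        # Value-dependent placeholders such as {outputMeshFileTypeValue} are left as-is here.
        resolved = (pattern.replace("{cache}", CACHE_ROOT)
                           .replace("{nodeType}", node["nodeType"])
                           .replace("{uid0}", uid0))
        print(f"{name}.{attr}: {resolved}")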
486  meshroom/pipelines/photogrammetryAndCameraTracking.mg  Normal file
@@ -0,0 +1,486 @@
{
|
||||
"header": {
|
||||
"pipelineVersion": "2.2",
|
||||
"releaseVersion": "2021.1.0",
|
||||
"fileVersion": "1.1",
|
||||
"nodesVersions": {
|
||||
"ExportAnimatedCamera": "2.0",
|
||||
"FeatureMatching": "2.0",
|
||||
"DistortionCalibration": "2.0",
|
||||
"CameraInit": "7.0",
|
||||
"ImageMatchingMultiSfM": "1.0",
|
||||
"ImageMatching": "2.0",
|
||||
"FeatureExtraction": "1.1",
|
||||
"StructureFromMotion": "2.0"
|
||||
}
|
||||
},
|
||||
"graph": {
|
||||
"DistortionCalibration_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"input": "{CameraInit_2.output}",
|
||||
"lensGrid": []
|
||||
},
|
||||
"nodeType": "DistortionCalibration",
|
||||
"uids": {
|
||||
"0": "8afea9d171904cdb6ba1c0b116cb60de3ccb6fb4"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"outSfMData": "{cache}/{nodeType}/{uid0}/sfmData.sfm"
|
||||
},
|
||||
"position": [
|
||||
1024,
|
||||
393
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatching_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"nbNeighbors": 5,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"maxDescriptors": 500,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"nbMatches": 40,
|
||||
"input": "{FeatureExtraction_1.input}",
|
||||
"method": "SequentialAndVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatching",
|
||||
"uids": {
|
||||
"0": "46fb9072ac753d60bec7dda9c8674b0568506ddf"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt"
|
||||
},
|
||||
"position": [
|
||||
400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"dspsift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "normal",
|
||||
"gridFiltering": true,
|
||||
"input": "{CameraInit_1.output}",
|
||||
"describerPreset": "normal"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "a07fb8d05b63327d05461954c2fd2a00f201275b"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"StructureFromMotion_1": {
|
||||
"inputs": {
|
||||
"localizerEstimatorMaxIterations": 4096,
|
||||
"minAngleForLandmark": 2.0,
|
||||
"filterTrackForks": false,
|
||||
"minNumberOfObservationsForTriangulation": 2,
|
||||
"maxAngleInitialPair": 40.0,
|
||||
"observationConstraint": "Scale",
|
||||
"maxNumberOfMatches": 0,
|
||||
"localizerEstimator": "acransac",
|
||||
"describerTypes": "{FeatureMatching_1.describerTypes}",
|
||||
"lockScenePreviouslyReconstructed": false,
|
||||
"localBAGraphDistance": 1,
|
||||
"minNbCamerasToRefinePrincipalPoint": 3,
|
||||
"lockAllIntrinsics": false,
|
||||
"input": "{FeatureMatching_1.input}",
|
||||
"featuresFolders": "{FeatureMatching_1.featuresFolders}",
|
||||
"useRigConstraint": true,
|
||||
"rigMinNbCamerasForCalibration": 20,
|
||||
"initialPairA": "",
|
||||
"initialPairB": "",
|
||||
"interFileExtension": ".abc",
|
||||
"useLocalBA": true,
|
||||
"computeStructureColor": true,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_1.output}"
|
||||
],
|
||||
"minInputTrackLength": 2,
|
||||
"useOnlyMatchesFromInputFolder": false,
|
||||
"verboseLevel": "info",
|
||||
"minAngleForTriangulation": 3.0,
|
||||
"maxReprojectionError": 4.0,
|
||||
"minAngleInitialPair": 5.0,
|
||||
"minNumberOfMatches": 0,
|
||||
"localizerEstimatorError": 0.0
|
||||
},
|
||||
"nodeType": "StructureFromMotion",
|
||||
"uids": {
|
||||
"0": "89c3db0849ba07dfac5e97ca9e27dd690dc476ce"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/sfm.abc",
|
||||
"extraInfoFolder": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ExportAnimatedCamera_1": {
|
||||
"inputs": {
|
||||
"exportFullROD": false,
|
||||
"undistortedImageType": "exr",
|
||||
"exportUVMaps": true,
|
||||
"verboseLevel": "info",
|
||||
"sfmDataFilter": "{StructureFromMotion_1.output}",
|
||||
"exportUndistortedImages": false,
|
||||
"input": "{StructureFromMotion_2.output}",
|
||||
"viewFilter": "",
|
||||
"correctPrincipalPoint": true
|
||||
},
|
||||
"nodeType": "ExportAnimatedCamera",
|
||||
"uids": {
|
||||
"0": "6f482ab9e161bd79341c5cd4a43ab9f8e39aec1f"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputUndistorted": "{cache}/{nodeType}/{uid0}/undistort",
|
||||
"outputCamera": "{cache}/{nodeType}/{uid0}/camera.abc"
|
||||
},
|
||||
"position": [
|
||||
1629,
|
||||
212
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_1": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye4",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatchingMultiSfM_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"matchingMode": "a/a+a/b",
|
||||
"nbNeighbors": 10,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"nbMatches": 5,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"maxDescriptors": 500,
|
||||
"input": "{FeatureExtraction_2.input}",
|
||||
"inputB": "{StructureFromMotion_1.output}",
|
||||
"method": "SequentialAndVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_2.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatchingMultiSfM",
|
||||
"uids": {
|
||||
"0": "ef147c1bc069c7689863c7e14cdbbaca86af4006"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt",
|
||||
"outputCombinedSfM": "{cache}/{nodeType}/{uid0}/combineSfM.sfm"
|
||||
},
|
||||
"position": [
|
||||
1029,
|
||||
212
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_2": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye4",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
-2,
|
||||
223
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_2": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"dspsift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "normal",
|
||||
"gridFiltering": true,
|
||||
"input": "{CameraInit_2.output}",
|
||||
"describerPreset": "normal"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "a07fb8d05b63327d05461954c2fd2a00f201275b"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
198,
|
||||
223
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_2": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_2.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatchingMultiSfM_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{DistortionCalibration_1.outSfMData}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatchingMultiSfM_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "7bb42f40b3f607da7e9f5f432409ddf6ef9c5951"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1229,
|
||||
212
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_1.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatching_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{ImageMatching_1.input}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "3b1f2c3fcfe0b94c65627c397a2671ba7594827d"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"StructureFromMotion_2": {
|
||||
"inputs": {
|
||||
"localizerEstimatorMaxIterations": 4096,
|
||||
"minAngleForLandmark": 0.5,
|
||||
"filterTrackForks": false,
|
||||
"minNumberOfObservationsForTriangulation": 3,
|
||||
"maxAngleInitialPair": 40.0,
|
||||
"observationConstraint": "Scale",
|
||||
"maxNumberOfMatches": 0,
|
||||
"localizerEstimator": "acransac",
|
||||
"describerTypes": "{FeatureMatching_2.describerTypes}",
|
||||
"lockScenePreviouslyReconstructed": false,
|
||||
"localBAGraphDistance": 1,
|
||||
"minNbCamerasToRefinePrincipalPoint": 3,
|
||||
"lockAllIntrinsics": false,
|
||||
"input": "{FeatureMatching_2.input}",
|
||||
"featuresFolders": "{FeatureMatching_2.featuresFolders}",
|
||||
"useRigConstraint": true,
|
||||
"rigMinNbCamerasForCalibration": 20,
|
||||
"initialPairA": "",
|
||||
"initialPairB": "",
|
||||
"interFileExtension": ".abc",
|
||||
"useLocalBA": true,
|
||||
"computeStructureColor": true,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_2.output}"
|
||||
],
|
||||
"minInputTrackLength": 5,
|
||||
"useOnlyMatchesFromInputFolder": false,
|
||||
"verboseLevel": "info",
|
||||
"minAngleForTriangulation": 1.0,
|
||||
"maxReprojectionError": 4.0,
|
||||
"minAngleInitialPair": 5.0,
|
||||
"minNumberOfMatches": 0,
|
||||
"localizerEstimatorError": 0.0
|
||||
},
|
||||
"nodeType": "StructureFromMotion",
|
||||
"uids": {
|
||||
"0": "4bc466c45bc7b430553752d1eb1640c581c43e36"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/sfm.abc",
|
||||
"extraInfoFolder": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
1429,
|
||||
212
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
}
|
||||
}
|
||||
}
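The draft template that follows reuses the photogrammetry nodes but, as its "nodesVersions" header shows, drops the dense-reconstruction stages (PrepareDenseScene, DepthMap, DepthMapFilter) and feeds Meshing directly from the StructureFromMotion output with an empty depth-maps folder. A quick way to confirm such differences from the files themselves, with assumed paths:

import json

def node_types(path):
    """Return the set of node types used by a pipeline template."""
    with open(path) as f:
        return {node["nodeType"] for node in json.load(f)["graph"].values()}

full = node_types("meshroom/pipelines/photogrammetry.mg")        # assumed path
draft = node_types("meshroom/pipelines/photogrammetryDraft.mg")  # assumed path
print("only in photogrammetry:", sorted(full - draft))
print("only in draft:", sorted(draft - full))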
404  meshroom/pipelines/photogrammetryDraft.mg  Normal file
@@ -0,0 +1,404 @@
{
|
||||
"header": {
|
||||
"nodesVersions": {
|
||||
"FeatureMatching": "2.0",
|
||||
"MeshFiltering": "3.0",
|
||||
"Texturing": "6.0",
|
||||
"Meshing": "7.0",
|
||||
"CameraInit": "7.0",
|
||||
"ImageMatching": "2.0",
|
||||
"FeatureExtraction": "1.1",
|
||||
"StructureFromMotion": "2.0"
|
||||
},
|
||||
"releaseVersion": "2021.1.0",
|
||||
"fileVersion": "1.1"
|
||||
},
|
||||
"graph": {
|
||||
"Texturing_1": {
|
||||
"inputs": {
|
||||
"imagesFolder": "",
|
||||
"downscale": 2,
|
||||
"bumpMapping": {
|
||||
"normalFileType": "exr",
|
||||
"enable": true,
|
||||
"bumpType": "Normal",
|
||||
"heightFileType": "exr"
|
||||
},
|
||||
"forceVisibleByAllVertices": false,
|
||||
"fillHoles": false,
|
||||
"multiBandDownscale": 4,
|
||||
"useScore": true,
|
||||
"displacementMapping": {
|
||||
"displacementMappingFileType": "exr",
|
||||
"enable": true
|
||||
},
|
||||
"outputMeshFileType": "obj",
|
||||
"angleHardThreshold": 90.0,
|
||||
"textureSide": 8192,
|
||||
"processColorspace": "sRGB",
|
||||
"input": "{Meshing_1.output}",
|
||||
"useUDIM": true,
|
||||
"subdivisionTargetRatio": 0.8,
|
||||
"padding": 5,
|
||||
"inputRefMesh": "",
|
||||
"correctEV": false,
|
||||
"visibilityRemappingMethod": "PullPush",
|
||||
"inputMesh": "{MeshFiltering_1.outputMesh}",
|
||||
"verboseLevel": "info",
|
||||
"colorMapping": {
|
||||
"enable": true,
|
||||
"colorMappingFileType": "exr"
|
||||
},
|
||||
"bestScoreThreshold": 0.1,
|
||||
"unwrapMethod": "Basic",
|
||||
"multiBandNbContrib": {
|
||||
"high": 1,
|
||||
"midHigh": 5,
|
||||
"low": 0,
|
||||
"midLow": 10
|
||||
},
|
||||
"flipNormals": false
|
||||
},
|
||||
"nodeType": "Texturing",
|
||||
"uids": {
|
||||
"0": "1ed1516bf83493071547e69146be3f1218012e25"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"outputTextures": "{cache}/{nodeType}/{uid0}/texture_*.exr",
|
||||
"outputMesh": "{cache}/{nodeType}/{uid0}/texturedMesh.{outputMeshFileTypeValue}",
|
||||
"outputMaterial": "{cache}/{nodeType}/{uid0}/texturedMesh.mtl",
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
1400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"Meshing_1": {
|
||||
"inputs": {
|
||||
"exportDebugTetrahedralization": false,
|
||||
"useBoundingBox": false,
|
||||
"maxInputPoints": 50000000,
|
||||
"repartition": "multiResolution",
|
||||
"helperPointsGridSize": 10,
|
||||
"seed": 0,
|
||||
"voteFilteringForWeaklySupportedSurfaces": true,
|
||||
"verboseLevel": "info",
|
||||
"outputMeshFileType": "obj",
|
||||
"simGaussianSizeInit": 10.0,
|
||||
"nPixelSizeBehind": 4.0,
|
||||
"fullWeight": 1.0,
|
||||
"depthMapsFolder": "",
|
||||
"densify": false,
|
||||
"simFactor": 15.0,
|
||||
"maskHelperPointsWeight": 1.0,
|
||||
"densifyScale": 20.0,
|
||||
"input": "{StructureFromMotion_1.output}",
|
||||
"addLandmarksToTheDensePointCloud": false,
|
||||
"voteMarginFactor": 4.0,
|
||||
"saveRawDensePointCloud": false,
|
||||
"contributeMarginFactor": 2.0,
|
||||
"estimateSpaceMinObservationAngle": 10,
|
||||
"nbSolidAngleFilteringIterations": 2,
|
||||
"minStep": 2,
|
||||
"colorizeOutput": false,
|
||||
"pixSizeMarginFinalCoef": 4.0,
|
||||
"densifyNbFront": 1,
|
||||
"boundingBox": {
|
||||
"bboxScale": {
|
||||
"y": 1.0,
|
||||
"x": 1.0,
|
||||
"z": 1.0
|
||||
},
|
||||
"bboxTranslation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
},
|
||||
"bboxRotation": {
|
||||
"y": 0.0,
|
||||
"x": 0.0,
|
||||
"z": 0.0
|
||||
}
|
||||
},
|
||||
"minSolidAngleRatio": 0.2,
|
||||
"maxPoints": 5000000,
|
||||
"addMaskHelperPoints": false,
|
||||
"maxPointsPerVoxel": 1000000,
|
||||
"angleFactor": 15.0,
|
||||
"partitioning": "singleBlock",
|
||||
"estimateSpaceFromSfM": true,
|
||||
"minAngleThreshold": 1.0,
|
||||
"pixSizeMarginInitCoef": 2.0,
|
||||
"refineFuse": true,
|
||||
"maxNbConnectedHelperPoints": 50,
|
||||
"estimateSpaceMinObservations": 3,
|
||||
"invertTetrahedronBasedOnNeighborsNbIterations": 10,
|
||||
"maskBorderSize": 4,
|
||||
"simGaussianSize": 10.0,
|
||||
"densifyNbBack": 1
|
||||
},
|
||||
"nodeType": "Meshing",
|
||||
"uids": {
|
||||
"0": "dc3d06f150a2601334a44174aa8e5523d3055468"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/densePointCloud.abc",
|
||||
"outputMesh": "{cache}/{nodeType}/{uid0}/mesh.{outputMeshFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
1000,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"ImageMatching_1": {
|
||||
"inputs": {
|
||||
"minNbImages": 200,
|
||||
"nbNeighbors": 5,
|
||||
"tree": "${ALICEVISION_VOCTREE}",
|
||||
"maxDescriptors": 500,
|
||||
"verboseLevel": "info",
|
||||
"weights": "",
|
||||
"nbMatches": 40,
|
||||
"input": "{FeatureExtraction_1.input}",
|
||||
"method": "SequentialAndVocabularyTree",
|
||||
"featuresFolders": [
|
||||
"{FeatureExtraction_1.output}"
|
||||
]
|
||||
},
|
||||
"nodeType": "ImageMatching",
|
||||
"uids": {
|
||||
"0": "46fb9072ac753d60bec7dda9c8674b0568506ddf"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/imageMatches.txt"
|
||||
},
|
||||
"position": [
|
||||
400,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureExtraction_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"maxThreads": 0,
|
||||
"describerTypes": [
|
||||
"dspsift"
|
||||
],
|
||||
"maxNbFeatures": 0,
|
||||
"relativePeakThreshold": 0.01,
|
||||
"forceCpuExtraction": true,
|
||||
"masksFolder": "",
|
||||
"contrastFiltering": "GridSort",
|
||||
"describerQuality": "normal",
|
||||
"gridFiltering": true,
|
||||
"input": "{CameraInit_1.output}",
|
||||
"describerPreset": "normal"
|
||||
},
|
||||
"nodeType": "FeatureExtraction",
|
||||
"uids": {
|
||||
"0": "a07fb8d05b63327d05461954c2fd2a00f201275b"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 40,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"StructureFromMotion_1": {
|
||||
"inputs": {
|
||||
"localizerEstimatorMaxIterations": 4096,
|
||||
"minAngleForLandmark": 2.0,
|
||||
"filterTrackForks": false,
|
||||
"minNumberOfObservationsForTriangulation": 2,
|
||||
"maxAngleInitialPair": 40.0,
|
||||
"observationConstraint": "Scale",
|
||||
"maxNumberOfMatches": 0,
|
||||
"localizerEstimator": "acransac",
|
||||
"describerTypes": "{FeatureMatching_1.describerTypes}",
|
||||
"lockScenePreviouslyReconstructed": false,
|
||||
"localBAGraphDistance": 1,
|
||||
"minNbCamerasToRefinePrincipalPoint": 3,
|
||||
"lockAllIntrinsics": false,
|
||||
"input": "{FeatureMatching_1.input}",
|
||||
"featuresFolders": "{FeatureMatching_1.featuresFolders}",
|
||||
"useRigConstraint": true,
|
||||
"rigMinNbCamerasForCalibration": 20,
|
||||
"initialPairA": "",
|
||||
"initialPairB": "",
|
||||
"interFileExtension": ".abc",
|
||||
"useLocalBA": true,
|
||||
"computeStructureColor": true,
|
||||
"matchesFolders": [
|
||||
"{FeatureMatching_1.output}"
|
||||
],
|
||||
"minInputTrackLength": 2,
|
||||
"useOnlyMatchesFromInputFolder": false,
|
||||
"verboseLevel": "info",
|
||||
"minAngleForTriangulation": 3.0,
|
||||
"maxReprojectionError": 4.0,
|
||||
"minAngleInitialPair": 5.0,
|
||||
"minNumberOfMatches": 0,
|
||||
"localizerEstimatorError": 0.0
|
||||
},
|
||||
"nodeType": "StructureFromMotion",
|
||||
"uids": {
|
||||
"0": "89c3db0849ba07dfac5e97ca9e27dd690dc476ce"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/sfm.abc",
|
||||
"extraInfoFolder": "{cache}/{nodeType}/{uid0}/",
|
||||
"outputViewsAndPoses": "{cache}/{nodeType}/{uid0}/cameras.sfm"
|
||||
},
|
||||
"position": [
|
||||
800,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"CameraInit_1": {
|
||||
"inputs": {
|
||||
"groupCameraFallback": "folder",
|
||||
"intrinsics": [],
|
||||
"viewIdRegex": ".*?(\\d+)",
|
||||
"defaultFieldOfView": 45.0,
|
||||
"allowedCameraModels": [
|
||||
"pinhole",
|
||||
"radial1",
|
||||
"radial3",
|
||||
"brown",
|
||||
"fisheye4",
|
||||
"fisheye1",
|
||||
"3deanamorphic4",
|
||||
"3deradial4",
|
||||
"3declassicld"
|
||||
],
|
||||
"verboseLevel": "info",
|
||||
"viewIdMethod": "metadata",
|
||||
"viewpoints": [],
|
||||
"useInternalWhiteBalance": true,
|
||||
"sensorDatabase": "${ALICEVISION_SENSOR_DB}"
|
||||
},
|
||||
"nodeType": "CameraInit",
|
||||
"uids": {
|
||||
"0": "f9436e97e444fa71a05aa5cf7639b206df8ba282"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/cameraInit.sfm"
|
||||
},
|
||||
"position": [
|
||||
0,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"MeshFiltering_1": {
|
||||
"inputs": {
|
||||
"filteringSubset": "all",
|
||||
"outputMeshFileType": "obj",
|
||||
"inputMesh": "{Meshing_1.outputMesh}",
|
||||
"filterTrianglesRatio": 0.0,
|
||||
"smoothingSubset": "all",
|
||||
"verboseLevel": "info",
|
||||
"smoothingIterations": 5,
|
||||
"filterLargeTrianglesFactor": 60.0,
|
||||
"keepLargestMeshOnly": false,
|
||||
"smoothingBoundariesNeighbours": 0,
|
||||
"smoothingLambda": 1.0,
|
||||
"filteringIterations": 1
|
||||
},
|
||||
"nodeType": "MeshFiltering",
|
||||
"uids": {
|
||||
"0": "057d1647de39a617f79aad02a721938e5625ff64"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 0,
|
||||
"split": 1,
|
||||
"size": 1
|
||||
},
|
||||
"outputs": {
|
||||
"outputMesh": "{cache}/{nodeType}/{uid0}/mesh.{outputMeshFileTypeValue}"
|
||||
},
|
||||
"position": [
|
||||
1200,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"FeatureMatching_1": {
|
||||
"inputs": {
|
||||
"verboseLevel": "info",
|
||||
"describerTypes": "{FeatureExtraction_1.describerTypes}",
|
||||
"exportDebugFiles": false,
|
||||
"crossMatching": false,
|
||||
"geometricError": 0.0,
|
||||
"maxMatches": 0,
|
||||
"matchFromKnownCameraPoses": false,
|
||||
"savePutativeMatches": false,
|
||||
"guidedMatching": false,
|
||||
"imagePairsList": "{ImageMatching_1.output}",
|
||||
"geometricEstimator": "acransac",
|
||||
"geometricFilterType": "fundamental_matrix",
|
||||
"maxIteration": 2048,
|
||||
"distanceRatio": 0.8,
|
||||
"input": "{ImageMatching_1.input}",
|
||||
"photometricMatchingMethod": "ANN_L2",
|
||||
"knownPosesGeometricErrorMax": 5.0,
|
||||
"featuresFolders": "{ImageMatching_1.featuresFolders}"
|
||||
},
|
||||
"nodeType": "FeatureMatching",
|
||||
"uids": {
|
||||
"0": "3b1f2c3fcfe0b94c65627c397a2671ba7594827d"
|
||||
},
|
||||
"parallelization": {
|
||||
"blockSize": 20,
|
||||
"split": 0,
|
||||
"size": 0
|
||||
},
|
||||
"outputs": {
|
||||
"output": "{cache}/{nodeType}/{uid0}/"
|
||||
},
|
||||
"position": [
|
||||
600,
|
||||
0
|
||||
],
|
||||
"internalFolder": "{cache}/{nodeType}/{uid0}/"
|
||||
}
|
||||
}
|
||||
}
|
|
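The JSON above is the tail of a bundled pipeline template (.mg file): each node entry stores its inputs, uids, parallelization settings, outputs and graph position. A minimal sketch of how such a template can be loaded and inspected, mirroring the test usage further below (it assumes Meshroom is importable and its template registry is already populated; the node and attribute names are taken from this file):

import meshroom.core
from meshroom.core.graph import loadGraph

# load the bundled "photogrammetry" template into a Graph object
graph = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])

# look up one of the nodes declared in the "graph" section above
cameraInit = graph.node("CameraInit_1")
print(cameraInit.nodeType)                  # "CameraInit"
print(cameraInit.defaultFieldOfView.value)  # 45.0 in this template
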
@@ -1,5 +1,6 @@
import logging
import os
import re
import argparse

from PySide2.QtCore import Qt, QUrl, Slot, QJsonValue, Property, Signal, qInstallMessageHandler, QtMsgType, QSettings

@@ -50,9 +51,19 @@ class MessageHandler(object):
    @classmethod
    def handler(cls, messageType, context, message):
        """ Message handler remapping Qt logs to Python logging system. """
        # discard blacklisted Qt messages related to QML when 'output qml warnings' is set to false
        if not cls.outputQmlWarnings and any(w in message for w in cls.qmlWarningsBlacklist):
            return

        if not cls.outputQmlWarnings:
            # If MESHROOM_OUTPUT_QML_WARNINGS is not set and an error in qml files happens, we're
            # left without any output except "QQmlApplicationEngine failed to load component".
            # This is extremely hard to debug for someone who does not know about
            # MESHROOM_OUTPUT_QML_WARNINGS beforehand, because by default Qml will output errors to
            # stdout.
            if "QQmlApplicationEngine failed to load component" in message:
                logging.warning("Set MESHROOM_OUTPUT_QML_WARNINGS=1 to get a detailed error message.")

            # discard blacklisted Qt messages related to QML when 'output qml warnings' is not enabled
            elif any(w in message for w in cls.qmlWarningsBlacklist):
                return
        MessageHandler.logFunctions[messageType](message)

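The handler change above boils down to a filtering policy: QML noise is kept out of the logs unless MESHROOM_OUTPUT_QML_WARNINGS is enabled, but a hint is still emitted when component loading fails. A minimal, Qt-free sketch of that policy (the blacklist entries are illustrative, not the real ones):

import logging

QML_WARNINGS_BLACKLIST = ["Binding loop detected", "QML Connections"]  # illustrative entries

def filterQtMessage(message, outputQmlWarnings):
    """Return the message to forward to logging, or None to discard it."""
    if not outputQmlWarnings:
        if "QQmlApplicationEngine failed to load component" in message:
            logging.warning("Set MESHROOM_OUTPUT_QML_WARNINGS=1 to get a detailed error message.")
        elif any(w in message for w in QML_WARNINGS_BLACKLIST):
            return None
    return message
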
@@ -71,8 +82,9 @@ class MeshroomApp(QApplication):
                            help='Import images to reconstruct from specified folder and sub-folders.')
        parser.add_argument('-s', '--save', metavar='PROJECT.mg', type=str, default='',
                            help='Save the created scene.')
        parser.add_argument('-p', '--pipeline', metavar='MESHROOM_FILE/photogrammetry/panoramaHdr/panoramaFisheyeHdr', type=str, default=os.environ.get("MESHROOM_DEFAULT_PIPELINE", "photogrammetry"),
                            help='Override the default Meshroom pipeline with this external graph.')
        parser.add_argument('-p', '--pipeline', metavar="FILE.mg/" + "/".join(meshroom.core.pipelineTemplates), type=str,
                            default=os.environ.get("MESHROOM_DEFAULT_PIPELINE", "photogrammetry"),
                            help='Override the default Meshroom pipeline with this external or template graph.')
        parser.add_argument("--verbose", help="Verbosity level", default='warning',
                            choices=['fatal', 'error', 'warning', 'info', 'debug', 'trace'],)

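The new --pipeline option builds its metavar from whatever templates were discovered at startup, so the help text always lists the installed templates. A standalone sketch of the same idea, with a hypothetical dictionary standing in for meshroom.core.pipelineTemplates:

import argparse

# hypothetical stand-in for meshroom.core.pipelineTemplates (template key -> path of the .mg file)
pipelineTemplates = {"photogrammetry": "photogrammetry.mg", "panoramaHdr": "panoramaHdr.mg"}

parser = argparse.ArgumentParser()
parser.add_argument('-p', '--pipeline',
                    metavar="FILE.mg/" + "/".join(pipelineTemplates),
                    type=str, default="photogrammetry",
                    help='Override the default pipeline with this external or template graph.')

print(parser.parse_args(["-p", "panoramaHdr"]).pipeline)  # panoramaHdr
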
@@ -176,6 +188,16 @@ class MeshroomApp(QApplication):

        self.engine.load(os.path.normpath(url))

    def _pipelineTemplateFiles(self):
        templates = []
        for key in sorted(meshroom.core.pipelineTemplates.keys()):
            # Use uppercase letters in the names as separators to format the template names nicely
            # e.g.: the template "panoramaHdr" will be shown as "Panorama Hdr" in the menu
            name = " ".join(re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]))
            variant = {"name": name, "key": key, "path": meshroom.core.pipelineTemplates[key]}
            templates.append(variant)
        return templates

    def _recentProjectFiles(self):
        projects = []
        settings = QSettings()

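The regular expression in _pipelineTemplateFiles splits a camelCase template key on its uppercase letters to build a readable menu label. A standalone sketch of that formatting (the template keys are just examples):

import re

def templateDisplayName(key):
    # capitalize the first letter, then split on uppercase letters:
    # "panoramaFisheyeHdr" -> ["Panorama", "Fisheye", "Hdr"] -> "Panorama Fisheye Hdr"
    return " ".join(re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]))

for key in ("photogrammetry", "panoramaHdr", "panoramaFisheyeHdr"):
    print(key, "->", templateDisplayName(key))
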
@@ -284,8 +306,7 @@ class MeshroomApp(QApplication):
            return md
        return markdown(md)

    @Property(QJsonValue, constant=True)
    def systemInfo(self):
    def _systemInfo(self):
        import platform
        import sys
        return {

@@ -293,8 +314,9 @@ class MeshroomApp(QApplication):
            'python': 'Python {}'.format(sys.version.split(" ")[0])
        }

    @Property("QVariantList", constant=True)
    def licensesModel(self):
    systemInfo = Property(QJsonValue, _systemInfo, constant=True)

    def _licensesModel(self):
        """
        Get info about open-source licenses for the application.
        Model provides:

@@ -316,6 +338,8 @@ class MeshroomApp(QApplication):
            }
        ]

    licensesModel = Property("QVariantList", _licensesModel, constant=True)
    pipelineTemplateFilesChanged = Signal()
    recentProjectFilesChanged = Signal()
    pipelineTemplateFiles = Property("QVariantList", _pipelineTemplateFiles, notify=pipelineTemplateFilesChanged)
    recentProjectFiles = Property("QVariantList", _recentProjectFiles, notify=recentProjectFilesChanged)

@@ -184,7 +184,8 @@ class DuplicateNodesCommand(GraphCommand):

    def redoImpl(self):
        srcNodes = [ self.graph.node(i) for i in self.srcNodeNames ]
        duplicates = list(self.graph.duplicateNodes(srcNodes).values())
        # flatten the list of duplicated nodes to avoid lists within the list
        duplicates = [ n for nodes in list(self.graph.duplicateNodes(srcNodes).values()) for n in nodes ]
        self.duplicates = [ n.name for n in duplicates ]
        return duplicates

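With this change the graph maps every source node to a list of duplicates, so the command flattens those lists with a nested comprehension. A toy illustration of the flattening step:

# hypothetical result of graph.duplicateNodes(): source node name -> list of duplicated node names
duplicatesPerSource = {"CameraInit_1": ["CameraInit_2"], "FeatureExtraction_1": ["FeatureExtraction_2"]}

# flatten the per-source lists into a single list of duplicated nodes
duplicates = [n for nodes in duplicatesPerSource.values() for n in nodes]
print(duplicates)  # ['CameraInit_2', 'FeatureExtraction_2']
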
@@ -559,9 +559,11 @@ class UIGraph(QObject):
        """
        with self.groupedGraphModification("Remove Nodes From Selected Nodes"):
            nodesToRemove, _ = self._graph.dfsOnDiscover(startNodes=nodes, reverse=True, dependenciesOnly=True)
            # filter out duplicates so that no node is removed more than once
            uniqueNodesToRemove = list(dict.fromkeys(nodesToRemove))
            # Perform nodes removal from leaves to start node so that edges
            # can be re-created in correct order on redo.
            self.removeNodes(list(reversed(nodesToRemove)))
            self.removeNodes(list(reversed(uniqueNodesToRemove)))

    @Slot(QObject, result="QVariantList")
    def duplicateNodes(self, nodes):

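dict.fromkeys is used above as an order-preserving de-duplication: dictionaries keep insertion order (guaranteed since Python 3.7), so the first occurrence of each node is kept and later repeats are dropped. For example:

nodesToRemove = ["FeatureMatching_1", "StructureFromMotion_1", "FeatureMatching_1"]
uniqueNodesToRemove = list(dict.fromkeys(nodesToRemove))
print(uniqueNodesToRemove)  # ['FeatureMatching_1', 'StructureFromMotion_1']
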
@@ -574,6 +576,7 @@ class UIGraph(QObject):
            list[Node]: the list of duplicated nodes
        """
        nodes = self.filterNodes(nodes)
        nPositions = []
        # enable updates between duplication and layout to get correct depths during layout
        with self.groupedGraphModification("Duplicate Selected Nodes", disableUpdates=False):
            # disable graph updates during duplication

@@ -581,8 +584,19 @@ class UIGraph(QObject):
            duplicates = self.push(commands.DuplicateNodesCommand(self._graph, nodes))
            # move nodes below the bounding box formed by the duplicated node(s)
            bbox = self._layout.boundingBox(duplicates)

            for n in duplicates:
                self.moveNode(n, Position(n.x, bbox[3] + self.layout.gridSpacing + n.y))
                idx = duplicates.index(n)
                yPos = n.y + self.layout.gridSpacing + bbox[3]
                if idx > 0 and (n.x, yPos) in nPositions:
                    # make sure the node will not be moved on top of another node
                    while (n.x, yPos) in nPositions:
                        yPos = yPos + self.layout.gridSpacing + self.layout.nodeHeight
                    self.moveNode(n, Position(n.x, yPos))
                else:
                    self.moveNode(n, Position(n.x, bbox[3] + self.layout.gridSpacing + n.y))
                nPositions.append((n.x, n.y))

        return duplicates

    @Slot(QObject, result="QVariantList")

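The duplication layout above records the positions already assigned and pushes a duplicate further down whenever its target slot is taken. A simplified sketch of that collision-avoidance loop, with made-up spacing constants:

GRID_SPACING = 40   # hypothetical values; the real ones come from the layout object
NODE_HEIGHT = 80

def placeBelow(x, y, occupied):
    """Return a free (x, y) slot, stepping down until no already-placed node occupies it."""
    while (x, y) in occupied:
        y += GRID_SPACING + NODE_HEIGHT
    occupied.append((x, y))
    return x, y

occupied = []
print(placeBelow(0, 120, occupied))  # (0, 120)
print(placeBelow(0, 120, occupied))  # (0, 240): the first slot was already taken
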
@@ -597,7 +611,9 @@ class UIGraph(QObject):
        """
        with self.groupedGraphModification("Duplicate Nodes From Selected Nodes"):
            nodesToDuplicate, _ = self._graph.dfsOnDiscover(startNodes=nodes, reverse=True, dependenciesOnly=True)
            duplicates = self.duplicateNodes(nodesToDuplicate)
            # filter out duplicates so that no node is duplicated more than once
            uniqueNodesToDuplicate = list(dict.fromkeys(nodesToDuplicate))
            duplicates = self.duplicateNodes(uniqueNodesToDuplicate)
        return duplicates

    @Slot(QObject)

@@ -68,7 +68,7 @@ RowLayout {
        id: paramMenu

        property bool isFileAttribute: attribute.type == "File"
        property bool isFilepath: isFileAttribute && Filepath.isFile(attribute.value)
        property bool isFilepath: isFileAttribute && Filepath.isFile(attribute.evalValue)

        MenuItem {
            text: "Reset To Default Value"

@@ -85,8 +85,8 @@ RowLayout {
            visible: paramMenu.isFileAttribute
            height: visible ? implicitHeight : 0
            text: paramMenu.isFilepath ? "Open Containing Folder" : "Open Folder"
            onClicked: paramMenu.isFilepath ? Qt.openUrlExternally(Filepath.dirname(attribute.value)) :
                                              Qt.openUrlExternally(Filepath.stringToUrl(attribute.value))
            onClicked: paramMenu.isFilepath ? Qt.openUrlExternally(Filepath.dirname(attribute.evalValue)) :
                                              Qt.openUrlExternally(Filepath.stringToUrl(attribute.evalValue))
        }

        MenuItem {

@@ -59,7 +59,7 @@ RowLayout {
            color: Colors.sysPalette.base

            Rectangle {
                visible: inputConnectMA.containsMouse || childrenRepeater.count > 0 || (attribute && attribute.isLink)
                visible: inputConnectMA.containsMouse || childrenRepeater.count > 0 || (attribute && attribute.isLink) || inputConnectMA.drag.active || inputDropArea.containsDrag
                radius: isList ? 0 : 2
                anchors.fill: parent
                anchors.margins: 2

@@ -127,18 +127,20 @@ RowLayout {
                property bool dragAccepted: false
                anchors.verticalCenter: parent.verticalCenter
                anchors.horizontalCenter: parent.horizontalCenter
                width: 4
                height: 4
                width: parent.width
                height: parent.height
                Drag.keys: [inputDragTarget.objectName]
                Drag.active: inputConnectMA.drag.active
                Drag.hotSpot.x: width*0.5
                Drag.hotSpot.y: height*0.5
                Drag.hotSpot.x: width * 0.5
                Drag.hotSpot.y: height * 0.5
            }

            MouseArea {
                id: inputConnectMA
                drag.target: attribute.isReadOnly ? undefined : inputDragTarget
                drag.threshold: 0
                // Move the edge's tip straight to the current mouse position instead of waiting until the drag operation has started
                drag.smoothed: false
                enabled: !root.readOnly
                anchors.fill: parent
                // use the same negative margins as DropArea to ease pin selection

@@ -174,6 +176,8 @@ RowLayout {
        Layout.fillWidth: true
        implicitHeight: childrenRect.height

        Layout.alignment: Qt.AlignVCenter

        Label {
            id: nameLabel

@@ -205,7 +209,7 @@ RowLayout {
            color: Colors.sysPalette.base

            Rectangle {
                visible: attribute.hasOutputConnections
                visible: attribute.hasOutputConnections || outputConnectMA.containsMouse || outputConnectMA.drag.active || outputDropArea.containsDrag
                radius: isList ? 0 : 2
                anchors.fill: parent
                anchors.margins: 2

@@ -269,8 +273,8 @@ RowLayout {
                property bool dropAccepted: false
                anchors.horizontalCenter: parent.horizontalCenter
                anchors.verticalCenter: parent.verticalCenter
                width: 4
                height: 4
                width: parent.width
                height: parent.height
                Drag.keys: [outputDragTarget.objectName]
                Drag.active: outputConnectMA.drag.active
                Drag.hotSpot.x: width*0.5

@@ -281,6 +285,8 @@ RowLayout {
                id: outputConnectMA
                drag.target: outputDragTarget
                drag.threshold: 0
                // Move the edge's tip straight to the current mouse position instead of waiting until the drag operation has started
                drag.smoothed: false
                anchors.fill: parent
                // use the same negative margins as DropArea to ease pin selection
                anchors.margins: outputDropArea.anchors.margins

@@ -346,6 +352,7 @@ RowLayout {
            }
            StateChangeScript {
                script: {
                    // Add the right offset if the initial click is not exactly at the center of the connection circle.
                    var pos = inputDragTarget.mapFromItem(inputConnectMA, inputConnectMA.mouseX, inputConnectMA.mouseY);
                    inputDragTarget.x = pos.x - inputDragTarget.width/2;
                    inputDragTarget.y = pos.y - inputDragTarget.height/2;

@@ -48,6 +48,10 @@ Panel {
        parseIntr()
    }

    onCameraInitIndexChanged: {
        parseIntr()
    }

    function changeCurrentIndex(newIndex) {
        _reconstruction.cameraInitIndex = newIndex
    }

@@ -409,38 +409,38 @@ ApplicationWindow {
                    onTriggered: ensureSaved(function() { _reconstruction.new() })
                }
                Menu {
                    id: newPipelineMenu
                    title: "New Pipeline"
                    TextMetrics {
                        id: textMetrics
                        font: action_PG_CT.font
                        elide: Text.ElideNone
                        text: action_PG_CT.text
                    enabled: newPipelineMenuItems.model != undefined && newPipelineMenuItems.model.length > 0
                    property int maxWidth: 1000
                    property int fullWidth: {
                        var result = 0;
                        for (var i = 0; i < count; ++i) {
                            var item = itemAt(i);
                            result = Math.max(item.implicitWidth + item.padding * 2, result);
                        }
                        return result;
                    }
                    implicitWidth: textMetrics.width + 10 // largest text width + margin
                    Action {
                        text: "Photogrammetry"
                        onTriggered: ensureSaved(function() { _reconstruction.new("photogrammetry") })
                    }
                    Action {
                        text: "Panorama HDR"
                        onTriggered: ensureSaved(function() { _reconstruction.new("panoramahdr") })
                    }
                    Action {
                        text: "Panorama Fisheye HDR"
                        onTriggered: ensureSaved(function() { _reconstruction.new("panoramafisheyehdr") })
                    }
                    Action {
                        id: action_PG_CT
                        text: "Photogrammetry and Camera Tracking (experimental)"
                        onTriggered: ensureSaved(function() { _reconstruction.new("photogrammetryandcameratracking") })
                    }
                    Action {
                        text: "Camera Tracking (experimental)"
                        onTriggered: ensureSaved(function() { _reconstruction.new("cameratracking") })
                    }
                    Action {
                        text: "Photogrammetry Draft (No CUDA)"
                        onTriggered: ensureSaved(function() { _reconstruction.new("photogrammetrydraft") })
                    implicitWidth: fullWidth
                    Repeater {
                        id: newPipelineMenuItems
                        model: MeshroomApp.pipelineTemplateFiles
                        MenuItem {
                            onTriggered: ensureSaved(function() {
                                _reconstruction.new(modelData["key"])
                            })

                            text: fileTextMetrics.elidedText
                            TextMetrics {
                                id: fileTextMetrics
                                text: modelData["name"]
                                elide: Text.ElideLeft
                                elideWidth: newPipelineMenu.maxWidth
                            }
                            ToolTip.text: modelData["path"]
                            ToolTip.visible: hovered
                            ToolTip.delay: 200
                        }
                    }
                }
                Action {

@@ -3,7 +3,6 @@ import logging
import math
import os
from threading import Thread
from collections import Iterable

from PySide2.QtCore import QObject, Slot, Property, Signal, QUrl, QSizeF
from PySide2.QtGui import QMatrix4x4, QMatrix3x3, QQuaternion, QVector3D, QVector2D

@@ -14,6 +13,7 @@ from meshroom import multiview
from meshroom.common.qt import QObjectListModel
from meshroom.core import Version
from meshroom.core.node import Node, CompatibilityNode, Status, Position
from meshroom.core.pyCompatibility import Iterable
from meshroom.ui.graph import UIGraph
from meshroom.ui.utils import makeProperty

@@ -249,10 +249,8 @@ class ViewpointWrapper(QObject):
        self._undistortedImagePath = os.path.join(self._activeNode_PrepareDenseScene.node.output.value, filename)
        self.denseSceneParamsChanged.emit()

    @Property(type=QObject, constant=True)
    def attribute(self):
        """ Get the underlying Viewpoint attribute wrapped by this Viewpoint. """
        return self._viewpoint
    # Get the underlying Viewpoint attribute wrapped by this Viewpoint.
    attribute = Property(QObject, lambda self: self._viewpoint, constant=True)

    @Property(type="QVariant", notify=initialParamsChanged)
    def initialIntrinsics(self):

@@ -483,27 +481,15 @@ class Reconstruction(UIGraph):
    @Slot(str)
    def new(self, pipeline=None):
        p = pipeline if pipeline != None else self._defaultPipeline
        """ Create a new photogrammetry pipeline. """
        if p.lower() == "photogrammetry":
            # default photogrammetry pipeline
            self.setGraph(multiview.photogrammetry())
        elif p.lower() == "panoramahdr":
            # default panorama hdr pipeline
            self.setGraph(multiview.panoramaHdr())
        elif p.lower() == "panoramafisheyehdr":
            # default panorama fisheye hdr pipeline
            self.setGraph(multiview.panoramaFisheyeHdr())
        elif p.lower() == "photogrammetryandcameratracking":
            # default camera tracking pipeline
            self.setGraph(multiview.photogrammetryAndCameraTracking())
        elif p.lower() == "cameratracking":
            # default camera tracking pipeline
            self.setGraph(multiview.cameraTracking())
        elif p.lower() == "photogrammetrydraft":
            # photogrammetry pipeline in draft mode (no cuda)
            self.setGraph(multiview.photogrammetryDraft())
        """ Create a new pipeline. """
        # Lower the input and the dictionary keys to make sure that all input types can be found:
        # - correct pipeline name but the case does not match (e.g. panoramaHDR instead of panoramaHdr)
        # - lowercase pipeline name given through the "New Pipeline" menu
        loweredPipelineTemplates = dict((k.lower(), v) for k, v in meshroom.core.pipelineTemplates.items())
        if p.lower() in loweredPipelineTemplates:
            self.load(loweredPipelineTemplates[p.lower()], setupProjectFile=False)
        else:
            # use the user-provided default photogrammetry project file
            # use the user-provided default project file
            self.load(p, setupProjectFile=False)

    @Slot(str, result=bool)

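Reconstruction.new() now resolves the requested pipeline name against the template registry case-insensitively and only falls back to treating the argument as a project file. A small sketch of that lookup, with an illustrative template dictionary:

# illustrative stand-in for meshroom.core.pipelineTemplates
pipelineTemplates = {"photogrammetry": "photogrammetry.mg", "panoramaHdr": "panoramaHdr.mg"}

def resolvePipeline(requested):
    """Return the template path for 'requested' (any casing), or the argument itself as a custom file."""
    lowered = {k.lower(): v for k, v in pipelineTemplates.items()}
    return lowered.get(requested.lower(), requested)

print(resolvePipeline("panoramaHDR"))   # panoramaHdr.mg, despite the case mismatch
print(resolvePipeline("my/custom.mg"))  # falls through to the user-provided file
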
@@ -9,7 +9,8 @@ REM set MESHROOM_OUTPUT_QML_WARNINGS=1
REM set MESHROOM_INSTANT_CODING=1
REM set QT_PLUGIN_PATH=C:\dev\meshroom\install
REM set QML2_IMPORT_PATH=C:\dev\meshroom\install\qml
REM set PATH=C:\dev\AliceVision\install\bin;C:\dev\vcpkg\installed\x64-windows\bin
REM set PATH=C:\dev\AliceVision\install\bin;C:\dev\vcpkg\installed\x64-windows\bin;%PATH%
REM set ALICEVISION_ROOT=C:\dev\AliceVision\install

python meshroom\ui

@@ -266,14 +266,16 @@ def test_duplicate_nodes():
    # duplicate from n1
    nodes_to_duplicate, _ = g.dfsOnDiscover(startNodes=[n1], reverse=True, dependenciesOnly=True)
    nMap = g.duplicateNodes(srcNodes=nodes_to_duplicate)
    for s, d in nMap.items():
        assert s.nodeType == d.nodeType
    for s, duplicated in nMap.items():
        for d in duplicated:
            assert s.nodeType == d.nodeType

    # check number of duplicated nodes
    assert len(nMap) == 3
    # check number of duplicated nodes and that every parent node has been duplicated once
    assert len(nMap) == 3 and all([len(nMap[i]) == 1 for i in nMap.keys()])

    # check connections
    assert nMap[n1].input.getLinkParam() == n0.output
    assert nMap[n2].input.getLinkParam() == nMap[n1].output
    assert nMap[n3].input.getLinkParam() == nMap[n1].output
    assert nMap[n3].input2.getLinkParam() == nMap[n2].output
    # access index 0 directly because we know there is a single duplicate for each parent node
    assert nMap[n1][0].input.getLinkParam() == n0.output
    assert nMap[n2][0].input.getLinkParam() == nMap[n1][0].output
    assert nMap[n3][0].input.getLinkParam() == nMap[n1][0].output
    assert nMap[n3][0].input2.getLinkParam() == nMap[n2][0].output

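The updated assertions reflect the new return shape of duplicateNodes: a mapping from each source node to the list of its duplicates, with exactly one duplicate per source in this test. A toy version of the same checks:

# hypothetical mapping: source node name -> list of duplicated node names
nMap = {"n1": ["n1_copy"], "n2": ["n2_copy"], "n3": ["n3_copy"]}

assert len(nMap) == 3 and all(len(dups) == 1 for dups in nMap.values())
for source, dups in nMap.items():
    for dup in dups:
        assert dup.startswith(source)  # stands in for the nodeType comparison in the real test
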
@@ -9,18 +9,34 @@ from meshroom.core.node import Node


def test_multiviewPipeline():
    graph1 = meshroom.multiview.photogrammetry(inputImages=['/non/existing/fileA'])
    graph2 = meshroom.multiview.photogrammetry(inputImages=[])
    graph2b = meshroom.multiview.photogrammetry(inputImages=[])
    graph3 = meshroom.multiview.photogrammetry(inputImages=['/non/existing/file1', '/non/existing/file2'])
    graph4 = meshroom.multiview.photogrammetry(inputViewpoints=[
    graph1InputImages = ['/non/existing/fileA']
    graph1 = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])
    graph1CameraInit = graph1.node("CameraInit_1")
    graph1CameraInit.viewpoints.extend([{'path': image} for image in graph1InputImages])

    graph2InputImages = [] # common to graph2 and graph2b
    graph2 = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])
    graph2CameraInit = graph2.node("CameraInit_1")
    graph2CameraInit.viewpoints.extend([{'path': image} for image in graph2InputImages])
    graph2b = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])
    graph2bCameraInit = graph2b.node("CameraInit_1")
    graph2bCameraInit.viewpoints.extend([{'path': image} for image in graph2InputImages])

    graph3InputImages = ['/non/existing/file1', '/non/existing/file2']
    graph3 = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])
    graph3CameraInit = graph3.node("CameraInit_1")
    graph3CameraInit.viewpoints.extend([{'path': image} for image in graph3InputImages])

    graph4InputViewpoints = [
        {'path': '/non/existing/file1', 'intrinsicId': 50},
        {'path': '/non/existing/file2', 'intrinsicId': 55}
    ])
    graph4b = meshroom.multiview.photogrammetry(inputViewpoints=[
        {'path': '/non/existing/file1', 'intrinsicId': 50},
        {'path': '/non/existing/file2', 'intrinsicId': 55}
    ])
    ] # common to graph4 and graph4b
    graph4 = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])
    graph4CameraInit = graph4.node("CameraInit_1")
    graph4CameraInit.viewpoints.extend(graph4InputViewpoints)
    graph4b = loadGraph(meshroom.core.pipelineTemplates["photogrammetry"])
    graph4bCameraInit = graph4b.node("CameraInit_1")
    graph4bCameraInit.viewpoints.extend(graph4InputViewpoints)

    assert graph1.findNode('CameraInit').viewpoints.at(0).path.value == '/non/existing/fileA'
    assert len(graph2.findNode('CameraInit').viewpoints) == 0