Mirror of https://github.com/alicevision/Meshroom.git
[multiview] new experimental pipeline for camera tracking
commit 0ddda9f982 (parent 2f18e89a9d)
4 changed files with 72 additions and 3 deletions
bin/meshroom_batch (5 changes, Normal file → Executable file)
@@ -21,7 +21,7 @@ parser.add_argument('-I', '--inputRecursive', metavar='FOLDERS/IMAGES', type=str
                     help='Input folders containing all images recursively.')
 
 parser.add_argument('-p', '--pipeline', metavar='photogrammetry/panoramaHdr/panoramaFisheyeHdr/MG_FILE', type=str, default='photogrammetry',
-                    help='"photogrammetry" pipeline, "panoramaHdr" pipeline, "panoramaFisheyeHdr" pipeline or a Meshroom file containing a custom pipeline to run on input images. '
+                    help='"photogrammetry", "panoramaHdr", "panoramaFisheyeHdr", "cameraTracking" pipeline or a Meshroom file containing a custom pipeline to run on input images. '
                          'Requirements: the graph must contain one CameraInit node, '
                          'and one Publish node if --output is set.')
@@ -119,6 +119,9 @@ with multiview.GraphModification(graph):
     elif args.pipeline.lower() == "panoramafisheyehdr":
         # default panorama Fisheye Hdr pipeline
         multiview.panoramaFisheyeHdr(inputViewpoints=views, inputIntrinsics=intrinsics, output=args.output, graph=graph)
+    elif args.pipeline.lower() == "cameratracking":
+        # default camera tracking pipeline
+        multiview.cameraTracking(inputViewpoints=views, inputIntrinsics=intrinsics, output=args.output, graph=graph)
     else:
         # custom pipeline
         graph.load(args.pipeline)
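With the change above, the new pipeline can be selected from the command line like the existing ones. A minimal invocation sketch (the input and output paths are placeholders; pipeline name matching is case-insensitive):

    meshroom_batch --pipeline cameraTracking --inputRecursive /path/to/frames --output /path/to/export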
@ -182,9 +182,11 @@ def panoramaFisheyeHdr(inputImages=None, inputViewpoints=None, inputIntrinsics=N
|
|||
panoramaHdr(inputImages, inputViewpoints, inputIntrinsics, output, graph)
|
||||
for panoramaInit in graph.nodesOfType("PanoramaInit"):
|
||||
panoramaInit.attribute("useFisheye").value = True
|
||||
for featureExtraction in graph.nodesOfType("FeatureExtraction"):
|
||||
# when using fisheye images, 'sift' performs better than 'dspsift'
|
||||
featureExtraction.attribute("describerTypes").value = ['sift']
|
||||
# when using fisheye images, the overlap between images can be small
|
||||
# and thus requires many features to get enough correspondances for cameras estimation
|
||||
for featureExtraction in graph.nodesOfType("FeatureExtraction"):
|
||||
featureExtraction.attribute("describerPreset").value = 'high'
|
||||
return graph
|
||||
|
||||
|
@@ -468,3 +470,60 @@ def sfmAugmentation(graph, sourceSfm, withMVS=False):
         mvsNodes = mvsPipeline(graph, structureFromMotion)
 
     return sfmNodes, mvsNodes
+
+
+def cameraTrackingPipeline(graph):
+    """
+    Instantiate a camera tracking pipeline inside 'graph'.
+
+    Args:
+        graph (Graph/UIGraph): the graph in which nodes should be instantiated
+
+    Returns:
+        list of Node: the created nodes
+    """
+
+    with GraphModification(graph):
+
+        cameraInit, featureExtraction, imageMatching, featureMatching, structureFromMotion = sfmPipeline(graph)
+
+        imageMatching.attribute("nbMatches").value = 5  # voctree nb matches
+        imageMatching.attribute("nbNeighbors").value = 10
+
+        structureFromMotion.attribute("minNumberOfMatches").value = 0
+        structureFromMotion.attribute("minInputTrackLength").value = 5
+        structureFromMotion.attribute("minNumberOfObservationsForTriangulation").value = 3
+        structureFromMotion.attribute("minAngleForTriangulation").value = 1.0
+        structureFromMotion.attribute("minAngleForLandmark").value = 0.5
+
+        exportAnimatedCamera = graph.addNewNode('ExportAnimatedCamera', input=structureFromMotion.output)
+
+        # store current pipeline version in graph header
+        graph.header.update({'pipelineVersion': __version__})
+
+    return [
+        cameraInit,
+        featureExtraction,
+        imageMatching,
+        featureMatching,
+        structureFromMotion,
+        exportAnimatedCamera,
+    ]
+
+
+def cameraTracking(inputImages=list(), inputViewpoints=list(), inputIntrinsics=list(), output='', graph=None):
+    if not graph:
+        graph = Graph('Camera Tracking')
+    with GraphModification(graph):
+        trackingNodes = cameraTrackingPipeline(graph)
+        cameraInit = trackingNodes[0]
+        cameraInit.viewpoints.extend([{'path': image} for image in inputImages])
+        cameraInit.viewpoints.extend(inputViewpoints)
+        cameraInit.intrinsics.extend(inputIntrinsics)
+
+        if output:
+            exportNode = trackingNodes[-1]
+            graph.addNewNode('Publish', output=output, inputFiles=[exportNode.output])
+
+    return graph
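The same pipeline can also be built programmatically through the multiview module. A minimal sketch, assuming Meshroom's node plugins have already been loaded (as the meshroom_batch script does at startup); the frame paths and output folder are placeholders, and Graph.save is used here as the usual way to write out a project file:

    from meshroom import multiview

    # placeholder frame paths for a hypothetical image sequence
    frames = ['/data/shot01/frame_%04d.jpg' % i for i in range(1, 101)]

    # build the default camera tracking graph; because 'output' is set,
    # a Publish node is connected to the ExportAnimatedCamera output, as above
    graph = multiview.cameraTracking(inputImages=frames, output='/data/shot01/export')

    # the resulting graph can be saved as a regular Meshroom project file
    graph.save('/data/shot01/cameraTracking.mg')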
@@ -420,6 +420,10 @@ ApplicationWindow {
                     text: "Panorama Fisheye HDR"
                     onTriggered: ensureSaved(function() { _reconstruction.new("panoramafisheyehdr") })
                 }
+                Action {
+                    text: "Camera Tracking (experimental)"
+                    onTriggered: ensureSaved(function() { _reconstruction.new("cameratracking") })
+                }
             }
             Action {
                 id: openActionItem
@@ -490,6 +490,9 @@ class Reconstruction(UIGraph):
         elif p.lower() == "panoramafisheyehdr":
             # default panorama fisheye hdr pipeline
             self.setGraph(multiview.panoramaFisheyeHdr())
+        elif p.lower() == "cameratracking":
+            # default camera tracking pipeline
+            self.setGraph(multiview.cameraTracking())
         else:
             # use the user-provided default photogrammetry project file
             self.load(p, setupProjectFile=False)