Mirror of https://github.com/alicevision/Meshroom.git (synced 2025-04-30 18:57:53 +02:00)
[nodes] add parallelization to LdrToHdrSampling/Merge and PanoramaWarping
parent a239005901
commit dc6ac5d67a
6 changed files with 90 additions and 39 deletions
@@ -188,7 +188,8 @@ def hdriPipeline(graph):
                                            input=panoramaOrientation.output)
 
     panoramaCompositing = graph.addNewNode('PanoramaCompositing',
-                                           input=panoramaWarping.output)
+                                           input=panoramaWarping.input,
+                                           warpingFolder=panoramaWarping.output)
 
     imageProcessing = graph.addNewNode('ImageProcessing',
                                        input=panoramaCompositing.output,
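To make the rewiring above explicit: PanoramaCompositing now receives the SfMData through `input` (forwarded from the warping node's own input) and the warped images through the new `warpingFolder` attribute. Below is a heavily trimmed, hypothetical graph snippet in the same spirit as `hdriPipeline`; it assumes a Meshroom environment with the node plugins already loaded, the intermediate nodes of the real pipeline are omitted, and the variable names are illustrative only.

from meshroom.core.graph import Graph

graph = Graph('hdriSketch')
cameraInit = graph.addNewNode('CameraInit')
panoramaWarping = graph.addNewNode('PanoramaWarping',
                                   input=cameraInit.output)
panoramaCompositing = graph.addNewNode('PanoramaCompositing',
                                       input=panoramaWarping.input,           # SfMData forwarded
                                       warpingFolder=panoramaWarping.output)  # warped images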
@@ -91,14 +91,6 @@ class LdrToHdrCalibration(desc.CommandLineNode):
             range=(0, 10, 1),
             uid=[],
         ),
-        desc.BoolParam(
-            name='byPass',
-            label='bypass convert',
-            description="Bypass HDR creation and use the medium bracket as the source for the next steps",
-            value=False,
-            uid=[0],
-            advanced=True,
-        ),
         desc.IntParam(
             name='channelQuantizationPower',
             label='Channel Quantization Power',
@@ -108,6 +100,17 @@ class LdrToHdrCalibration(desc.CommandLineNode):
             uid=[0],
             advanced=True,
         ),
+        desc.IntParam(
+            name='maxTotalPoints',
+            label='Max Number of Points',
+            description='Max number of points selected by the sampling strategy.\n'
+                        'This ensures that this sampling step will extract a number of pixels values\n'
+                        'that the calibration step can manage (in term of computation time and memory usage).',
+            value=1000000,
+            range=(8, 10000000, 1000),
+            uid=[0],
+            advanced=True,
+        ),
         desc.ChoiceParam(
             name='verboseLevel',
             label='Verbose Level',
@@ -145,9 +148,7 @@ class LdrToHdrCalibration(desc.CommandLineNode):
         if not cameraInitOutput:
             node.nbBrackets.value = 0
             return
-        print("LdrToHdrCalib cameraInitOutput: " + str(cameraInitOutput))
         viewpoints = cameraInitOutput.node.viewpoints.value
-        print("LdrToHdrCalib viewpoints: " + str(viewpoints))
 
         # logging.info("[LDRToHDR] Update start: nb viewpoints:" + str(len(viewpoints)))
         inputs = []
@@ -23,28 +23,11 @@ def findMetadata(d, keys, defaultValue):
     return defaultValue
 
 
-class DividedInputNodeSize(desc.DynamicNodeSize):
-    """
-    The LDR2HDR will reduce the amount of views in the SfMData.
-    This class converts the number of LDR input views into the number of HDR output views.
-    """
-    def __init__(self, param, divParam):
-        super(DividedInputNodeSize, self).__init__(param)
-        self._divParam = divParam
-    def computeSize(self, node):
-        s = super(DividedInputNodeSize, self).computeSize(node)
-        divParam = node.attribute(self._divParam)
-        if divParam.value == 0:
-            return s
-        return s / divParam.value
-
-
 class LdrToHdrMerge(desc.CommandLineNode):
     commandLine = 'aliceVision_LdrToHdrMerge {allParams}'
-    size = DividedInputNodeSize('input', 'nbBrackets')
-    #parallelization = desc.Parallelization(blockSize=40)
-    #commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
+    size = desc.DynamicNodeSize('input')
+    parallelization = desc.Parallelization(blockSize=2)
+    commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
 
     documentation = '''
Calibrate LDR to HDR response curve from samples
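A note on the parallelization switch above: LdrToHdrMerge now reports its size as the plain number of input views and processes them in chunks of two, each chunk expanded into the `--rangeStart`/`--rangeSize` arguments. The snippet below is a minimal standalone sketch of that chunking, not Meshroom's own implementation (which lives in `desc.Parallelization`); the helper name `chunk_ranges` is made up for illustration.

def chunk_ranges(size, block_size=2):
    """Yield (rangeStart, rangeBlockSize) pairs covering [0, size)."""
    for start in range(0, size, block_size):
        yield start, min(block_size, size - start)

# e.g. a node of size 7 with blockSize=2 -> chunks (0, 2), (2, 2), (4, 2), (6, 1),
# each rendered into '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
print(list(chunk_ranges(7)))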
@@ -97,7 +80,7 @@ class LdrToHdrMerge(desc.CommandLineNode):
         desc.BoolParam(
             name='byPass',
             label='bypass convert',
-            description="Bypass HDR creation and use the medium bracket as the source for the next steps",
+            description="Bypass HDR creation and use the medium bracket as the source for the next steps.",
             value=False,
             uid=[0],
             advanced=True,
@@ -111,6 +94,40 @@ class LdrToHdrMerge(desc.CommandLineNode):
             uid=[0],
             advanced=True,
         ),
+        desc.FloatParam(
+            name='highlightCorrectionFactor',
+            label='Highlights Correction',
+            description='Pixels saturated in all input images have a partial information about their real luminance.\n'
+                        'We only know that the value should be >= to the standard hdr fusion.\n'
+                        'This parameter allows to perform a post-processing step to put saturated pixels to a constant '
+                        'value defined by the `highlightsMaxLuminance` parameter.\n'
+                        'This parameter is float to enable to weight this correction.',
+            value=1.0,
+            range=(0.0, 1.0, 0.01),
+            uid=[0],
+        ),
+        desc.FloatParam(
+            name='highlightTargetLux',
+            label='Highlight Target Luminance (Lux)',
+            description='This is an arbitrary target value (in Lux) used to replace the unknown luminance value of the saturated pixels.\n'
+                        '\n'
+                        'Some Outdoor Reference Light Levels:\n'
+                        ' * 120,000 lux : Brightest sunlight\n'
+                        ' * 110,000 lux : Bright sunlight\n'
+                        ' * 20,000 lux : Shade illuminated by entire clear blue sky, midday\n'
+                        ' * 1,000 lux : Typical overcast day, midday\n'
+                        ' * 400 lux : Sunrise or sunset on a clear day\n'
+                        ' * 40 lux : Fully overcast, sunset/sunrise\n'
+                        '\n'
+                        'Some Indoor Reference Light Levels:\n'
+                        ' * 20000 lux : Max Usually Used Indoor\n'
+                        ' * 750 lux : Supermarkets\n'
+                        ' * 500 lux : Office Work\n'
+                        ' * 150 lux : Home\n',
+            value=120000.0,
+            range=(1000.0, 150000.0, 1.0),
+            uid=[0],
+        ),
         desc.ChoiceParam(
             name='verboseLevel',
             label='Verbose Level',
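For context on the two new highlight parameters: pixels saturated in every bracket only carry a lower bound on their true luminance, so the node can push them toward `highlightTargetLux`, weighted by `highlightCorrectionFactor`. The sketch below is only an assumption about that weighting (a simple linear blend) and is not taken from the AliceVision sources.

def corrected_luminance(fused_value, target_lux, correction_factor):
    # Assumed linear blend between the (under-estimated) fused luminance and the
    # user-chosen target; factor 0.0 keeps the fusion result, 1.0 replaces it.
    return fused_value + correction_factor * (target_lux - fused_value)

print(corrected_luminance(5000.0, 120000.0, 1.0))   # 120000.0
print(corrected_luminance(5000.0, 120000.0, 0.5))   # 62500.0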
@@ -5,6 +5,7 @@ import os
 
 from meshroom.core import desc
 
+
 def findMetadata(d, keys, defaultValue):
     v = None
     for key in keys:
@@ -23,11 +24,27 @@ def findMetadata(d, keys, defaultValue):
     return defaultValue
 
 
+class DividedInputNodeSize(desc.DynamicNodeSize):
+    """
+    The LDR2HDR will reduce the amount of views in the SfMData.
+    This class converts the number of LDR input views into the number of HDR output views.
+    """
+    def __init__(self, param, divParam):
+        super(DividedInputNodeSize, self).__init__(param)
+        self._divParam = divParam
+    def computeSize(self, node):
+        s = super(DividedInputNodeSize, self).computeSize(node)
+        divParam = node.attribute(self._divParam)
+        if divParam.value == 0:
+            return s
+        return s / divParam.value
+
+
 class LdrToHdrSampling(desc.CommandLineNode):
     commandLine = 'aliceVision_LdrToHdrSampling {allParams}'
-    size = desc.DynamicNodeSize('input')
-    #parallelization = desc.Parallelization(blockSize=40)
-    #commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
+    size = DividedInputNodeSize('input', 'nbBrackets')
+    parallelization = desc.Parallelization(blockSize=2)
+    commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
 
     documentation = '''
Sample pixels from Low range images for HDR creation
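The DividedInputNodeSize class moved here makes LdrToHdrSampling report one unit of work per bracket group rather than per LDR view. A minimal standalone illustration of that size computation (integer division is used here for readability; the class above applies plain division to the dynamic size):

def divided_size(nb_views, nb_brackets):
    # One HDR output per group of nb_brackets LDR views; fall back to the raw
    # view count when the bracket count is unknown (0).
    if nb_brackets == 0:
        return nb_views
    return nb_views // nb_brackets

# 15 LDR views shot in brackets of 3 -> size 5; with blockSize=2 this yields
# three chunks covering ranges (0, 2), (2, 2) and (4, 1).
print(divided_size(15, 3))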
@@ -64,7 +81,7 @@ class LdrToHdrSampling(desc.CommandLineNode):
             description="Bypass HDR creation and use the medium bracket as the source for the next steps",
             value=False,
             uid=[0],
-            advanced=True,
+            group='internal',
         ),
         desc.IntParam(
             name='channelQuantizationPower',
@@ -96,6 +113,11 @@ class LdrToHdrSampling(desc.CommandLineNode):
         ),
     ]
 
+    def processChunk(self, chunk):
+        if chunk.node.byPass.value:
+            return
+        super(LdrToHdrSampling, self).processChunk(chunk)
+
     @classmethod
     def update(cls, node):
         if not isinstance(node.nodeDesc, cls):
@@ -108,7 +130,7 @@ class LdrToHdrSampling(desc.CommandLineNode):
             node.nbBrackets.value = node.userNbBrackets.value
             return
         # logging.info("[LDRToHDR] Update start: version:" + str(node.packageVersion))
-        cameraInitOutput = node.input.getLinkParam()
+        cameraInitOutput = node.input.getLinkParam(recursive=True)
         if not cameraInitOutput:
             node.nbBrackets.value = 0
             return
@@ -20,8 +20,15 @@ Multiple cameras are contributing to the low frequencies and only the best one c
     inputs = [
         desc.File(
             name='input',
-            label='Input',
-            description="Panorama Warping result",
+            label='Input SfMData',
+            description="Input SfMData.",
+            value='',
+            uid=[0],
+        ),
+        desc.File(
+            name='warpingFolder',
+            label='Warping Folder',
+            description="Panorama Warping results",
             value='',
             uid=[0],
         ),
@@ -10,6 +10,9 @@ class PanoramaWarping(desc.CommandLineNode):
     commandLine = 'aliceVision_panoramaWarping {allParams}'
     size = desc.DynamicNodeSize('input')
 
+    parallelization = desc.Parallelization(blockSize=5)
+    commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
+
     documentation = '''
Compute the image warping for each input image in the panorama coordinate system.
'''
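PanoramaWarping is now parallelized in blocks of five views, with each chunk's bounds substituted into the commandLineRange template. A quick illustration of that substitution; the numeric values are invented for the example.

command_line_range = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
# For the third chunk of a 17-view panorama with blockSize=5:
print(command_line_range.format(rangeStart=10, rangeBlockSize=5))
# --rangeStart 10 --rangeSize 5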