[nodes] add parallelization to LdrToHdrSampling/Merge and PanoramaWarping

Fabien Castan 2020-07-05 23:11:07 +02:00
parent a239005901
commit dc6ac5d67a
6 changed files with 90 additions and 39 deletions

View file

@@ -188,7 +188,8 @@ def hdriPipeline(graph):
input=panoramaOrientation.output)
panoramaCompositing = graph.addNewNode('PanoramaCompositing',
input=panoramaWarping.output)
input=panoramaWarping.input,
warpingFolder=panoramaWarping.output)
imageProcessing = graph.addNewNode('ImageProcessing',
input=panoramaCompositing.output,
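
For reference, a sketch of how the relevant lines of hdriPipeline(graph) read after this change (an illustrative excerpt, assuming the upstream panoramaOrientation node from the same function): PanoramaCompositing now forwards the SfMData from PanoramaWarping's own input, while the warped images arrive through the new warpingFolder attribute.

panoramaWarping = graph.addNewNode('PanoramaWarping',
                                   input=panoramaOrientation.output)
panoramaCompositing = graph.addNewNode('PanoramaCompositing',
                                       input=panoramaWarping.input,           # same SfMData as the warping node
                                       warpingFolder=panoramaWarping.output)  # folder of warped images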

View file

@@ -91,14 +91,6 @@ class LdrToHdrCalibration(desc.CommandLineNode):
range=(0, 10, 1),
uid=[],
),
desc.BoolParam(
name='byPass',
label='bypass convert',
description="Bypass HDR creation and use the medium bracket as the source for the next steps",
value=False,
uid=[0],
advanced=True,
),
desc.IntParam(
name='channelQuantizationPower',
label='Channel Quantization Power',
@@ -108,6 +100,17 @@ class LdrToHdrCalibration(desc.CommandLineNode):
uid=[0],
advanced=True,
),
desc.IntParam(
name='maxTotalPoints',
label='Max Number of Points',
description='Max number of points selected by the sampling strategy.\n'
'This ensures that this sampling step will extract a number of pixel values\n'
'that the calibration step can manage (in terms of computation time and memory usage).',
value=1000000,
range=(8, 10000000, 1000),
uid=[0],
advanced=True,
),
desc.ChoiceParam(
name='verboseLevel',
label='Verbose Level',
@@ -145,9 +148,7 @@ class LdrToHdrCalibration(desc.CommandLineNode):
if not cameraInitOutput:
node.nbBrackets.value = 0
return
print("LdrToHdrCalib cameraInitOutput: " + str(cameraInitOutput))
viewpoints = cameraInitOutput.node.viewpoints.value
print("LdrToHdrCalib viewpoints: " + str(viewpoints))
# logging.info("[LDRToHDR] Update start: nb viewpoints:" + str(len(viewpoints)))
inputs = []
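
The new maxTotalPoints parameter caps the number of sampled pixel values the calibration step has to process, keeping its runtime and memory bounded. As a purely illustrative sketch of what such a budget means per view (an assumption, not the aliceVision sampling strategy):

# Illustrative only: spreading a global sample budget across the input views.
def samples_per_view(max_total_points, nb_views):
    if nb_views == 0:
        return 0
    return max(1, max_total_points // nb_views)

print(samples_per_view(1000000, 250))  # -> 4000 samples per view for 250 LDR images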

View file

@@ -23,28 +23,11 @@ def findMetadata(d, keys, defaultValue):
return defaultValue
class DividedInputNodeSize(desc.DynamicNodeSize):
"""
The LDR2HDR node will reduce the number of views in the SfMData.
This class converts the number of LDR input views into the number of HDR output views.
"""
def __init__(self, param, divParam):
super(DividedInputNodeSize, self).__init__(param)
self._divParam = divParam
def computeSize(self, node):
s = super(DividedInputNodeSize, self).computeSize(node)
divParam = node.attribute(self._divParam)
if divParam.value == 0:
return s
return s / divParam.value
class LdrToHdrMerge(desc.CommandLineNode):
commandLine = 'aliceVision_LdrToHdrMerge {allParams}'
size = DividedInputNodeSize('input', 'nbBrackets')
#parallelization = desc.Parallelization(blockSize=40)
#commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
size = desc.DynamicNodeSize('input')
parallelization = desc.Parallelization(blockSize=2)
commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
documentation = '''
Calibrate LDR to HDR response curve from samples
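
The core of this commit for LdrToHdrMerge (and, below, for LdrToHdrSampling and PanoramaWarping) is that the previously commented-out Parallelization/commandLineRange pair becomes active: Meshroom splits the node's size into chunks of blockSize and substitutes each chunk's range into the command line. A minimal, self-contained illustration of that splitting (not Meshroom's internal code):

# Illustrative only: a node of size 7 split with blockSize=2, and how the
# '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}' template is filled per chunk.
def chunk_ranges(size, block_size):
    ranges = []
    start = 0
    while start < size:
        ranges.append((start, min(block_size, size - start)))
        start += block_size
    return ranges

for start, block in chunk_ranges(7, 2):
    print('aliceVision_LdrToHdrMerge ... --rangeStart {} --rangeSize {}'.format(start, block))
# -> 4 chunks: (0, 2) (2, 2) (4, 2) (6, 1)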
@@ -97,7 +80,7 @@ class LdrToHdrMerge(desc.CommandLineNode):
desc.BoolParam(
name='byPass',
label='bypass convert',
description="Bypass HDR creation and use the medium bracket as the source for the next steps",
description="Bypass HDR creation and use the medium bracket as the source for the next steps.",
value=False,
uid=[0],
advanced=True,
@@ -111,6 +94,40 @@ class LdrToHdrMerge(desc.CommandLineNode):
uid=[0],
advanced=True,
),
desc.FloatParam(
name='highlightCorrectionFactor',
label='Highlights Correction',
description='Pixels saturated in all input images only carry partial information about their real luminance.\n'
'We only know that their value should be greater than or equal to the standard HDR fusion result.\n'
'This parameter enables a post-processing step that sets saturated pixels to a constant '
'value defined by the `highlightTargetLux` parameter.\n'
'This parameter is a float so that the correction can be weighted.',
value=1.0,
range=(0.0, 1.0, 0.01),
uid=[0],
),
desc.FloatParam(
name='highlightTargetLux',
label='Highlight Target Luminance (Lux)',
description='This is an arbitrary target value (in Lux) used to replace the unknown luminance value of the saturated pixels.\n'
'\n'
'Some Outdoor Reference Light Levels:\n'
' * 120,000 lux : Brightest sunlight\n'
' * 110,000 lux : Bright sunlight\n'
' * 20,000 lux : Shade illuminated by entire clear blue sky, midday\n'
' * 1,000 lux : Typical overcast day, midday\n'
' * 400 lux : Sunrise or sunset on a clear day\n'
' * 40 lux : Fully overcast, sunset/sunrise\n'
'\n'
'Some Indoor Reference Light Levels:\n'
' * 20,000 lux : Maximum usually used indoors\n'
' * 750 lux : Supermarkets\n'
' * 500 lux : Office Work\n'
' * 150 lux : Home\n',
value=120000.0,
range=(1000.0, 150000.0, 1.0),
uid=[0],
),
desc.ChoiceParam(
name='verboseLevel',
label='Verbose Level',
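
The two new highlight parameters describe a post-processing step for pixels that are saturated in every bracket: their fused value is only a lower bound, so it can be pushed toward a target luminance, weighted by highlightCorrectionFactor. A rough, hypothetical sketch of what such a correction could look like (an assumption based on the descriptions above, not aliceVision's actual implementation):

# Hypothetical illustration of the described highlight correction (not the real code).
def correct_saturated(fused_value, target_value, factor):
    # The saturated pixel is at least 'fused_value'; blend it toward the target, weighted by 'factor'.
    corrected = max(fused_value, target_value)
    return (1.0 - factor) * fused_value + factor * corrected

print(correct_saturated(0.9, 3.5, 1.0))   # fully corrected   -> 3.5
print(correct_saturated(0.9, 3.5, 0.25))  # partial weighting -> 1.55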

View file

@@ -5,6 +5,7 @@ import os
from meshroom.core import desc
def findMetadata(d, keys, defaultValue):
v = None
for key in keys:
@@ -23,11 +24,27 @@ def findMetadata(d, keys, defaultValue):
return defaultValue
class DividedInputNodeSize(desc.DynamicNodeSize):
"""
The LDR2HDR node will reduce the number of views in the SfMData.
This class converts the number of LDR input views into the number of HDR output views.
"""
def __init__(self, param, divParam):
super(DividedInputNodeSize, self).__init__(param)
self._divParam = divParam
def computeSize(self, node):
s = super(DividedInputNodeSize, self).computeSize(node)
divParam = node.attribute(self._divParam)
if divParam.value == 0:
return s
return s / divParam.value
class LdrToHdrSampling(desc.CommandLineNode):
commandLine = 'aliceVision_LdrToHdrSampling {allParams}'
size = desc.DynamicNodeSize('input')
#parallelization = desc.Parallelization(blockSize=40)
#commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
size = DividedInputNodeSize('input', 'nbBrackets')
parallelization = desc.Parallelization(blockSize=2)
commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
documentation = '''
Sample pixels from Low range images for HDR creation
@@ -64,7 +81,7 @@ class LdrToHdrSampling(desc.CommandLineNode):
description="Bypass HDR creation and use the medium bracket as the source for the next steps",
value=False,
uid=[0],
advanced=True,
group='internal',
),
desc.IntParam(
name='channelQuantizationPower',
@@ -96,6 +113,11 @@ class LdrToHdrSampling(desc.CommandLineNode):
),
]
def processChunk(self, chunk):
if chunk.node.byPass.value:
return
super(LdrToHdrSampling, self).processChunk(chunk)
@classmethod
def update(cls, node):
if not isinstance(node.nodeDesc, cls):
@@ -108,7 +130,7 @@ class LdrToHdrSampling(desc.CommandLineNode):
node.nbBrackets.value = node.userNbBrackets.value
return
# logging.info("[LDRToHDR] Update start: version:" + str(node.packageVersion))
cameraInitOutput = node.input.getLinkParam()
cameraInitOutput = node.input.getLinkParam(recursive=True)
if not cameraInitOutput:
node.nbBrackets.value = 0
return
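
With this commit, DividedInputNodeSize moves from LdrToHdrMerge to LdrToHdrSampling: the sampling node's size becomes the number of HDR groups (LDR views divided by nbBrackets), which the new blockSize=2 parallelization then splits into chunks. A small numeric illustration of the size computation the class performs:

# Illustrative only: effect of DividedInputNodeSize('input', 'nbBrackets').
nb_ldr_views = 15   # LDR views coming from CameraInit
nb_brackets = 3     # exposure brackets per HDR group
node_size = nb_ldr_views if nb_brackets == 0 else nb_ldr_views / nb_brackets
print(node_size)    # -> 5.0 HDR groups, split into 3 chunks with blockSize=2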

View file

@@ -20,8 +20,15 @@ Multiple cameras are contributing to the low frequencies and only the best one c
inputs = [
desc.File(
name='input',
label='Input',
description="Panorama Warping result",
label='Input SfMData',
description="Input SfMData.",
value='',
uid=[0],
),
desc.File(
name='warpingFolder',
label='Warping Folder',
description="Panorama Warping results",
value='',
uid=[0],
),

View file

@@ -10,6 +10,9 @@ class PanoramaWarping(desc.CommandLineNode):
commandLine = 'aliceVision_panoramaWarping {allParams}'
size = desc.DynamicNodeSize('input')
parallelization = desc.Parallelization(blockSize=5)
commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
documentation = '''
Compute the image warping for each input image in the panorama coordinate system.
'''
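
PanoramaWarping gets the same chunking mechanism with a larger block. As a quick, hypothetical illustration of the per-chunk arguments for a 12-view panorama with blockSize=5:

# Illustrative only: per-chunk ranges for a node of size 12 with blockSize=5.
size, block_size = 12, 5
for start in range(0, size, block_size):
    print('aliceVision_panoramaWarping ... --rangeStart {} --rangeSize {}'.format(
        start, min(block_size, size - start)))
# -> ranges (0, 5) (5, 5) (10, 2)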