Merge remote-tracking branch 'origin/develop' into dev/panoramaFisheye

This commit is contained in:
Fabien Castan 2020-03-26 13:56:51 +01:00
commit 2f17e9aa80
7 changed files with 185 additions and 65 deletions

View file

@@ -132,9 +132,6 @@ class LogManager:
def __init__(self, chunk): def __init__(self, chunk):
self.chunk = chunk self.chunk = chunk
self.chunk.statusChanged.connect(self.clear)
self.progressBar = False
self.cleared = False
self.logger = logging.getLogger(chunk.node.getName()) self.logger = logging.getLogger(chunk.node.getName())
class Formatter(logging.Formatter): class Formatter(logging.Formatter):
@@ -151,27 +148,22 @@ class LogManager:
handler.setFormatter(formatter) handler.setFormatter(formatter)
self.logger.addHandler(handler) self.logger.addHandler(handler)
def clear(self): def start(self, level):
if self.chunk.statusName == 'RUNNING' and not self.cleared: # Clear log file
open(self.chunk.logFile, 'w').close() open(self.chunk.logFile, 'w').close()
self.configureLogger()
self.cleared = True self.configureLogger()
# When the node gets ran again the log needs to be cleared self.logger.setLevel(self.textToLevel(level))
elif self.chunk.statusName in ['ERROR', 'SUCCESS']: self.progressBar = False
for handler in self.logger.handlers[:]:
# Stops the file being locked
handler.close()
self.cleared = False
self.progressBar = False
def waitUntilCleared(self): def end(self):
while not self.cleared: for handler in self.logger.handlers[:]:
time.sleep(0.01) # Stops the file being locked
handler.close()
def makeProgressBar(self, end, message=''): def makeProgressBar(self, end, message=''):
assert end > 0 assert end > 0
assert not self.progressBar assert not self.progressBar
self.waitUntilCleared()
self.progressEnd = end self.progressEnd = end
self.currentProgressTics = 0 self.currentProgressTics = 0
@@ -194,7 +186,6 @@ class LogManager:
def updateProgressBar(self, value): def updateProgressBar(self, value):
assert self.progressBar assert self.progressBar
assert value <= self.progressEnd assert value <= self.progressEnd
self.waitUntilCleared()
tics = round((value/self.progressEnd)*51) tics = round((value/self.progressEnd)*51)

View file

@@ -0,0 +1,110 @@
__version__ = "1.1"
from meshroom.core import desc
class ImageProcessing(desc.CommandLineNode):
    """
    Run the aliceVision_utils_imageProcessing executable on the views of an
    SfMData file, exposing its image-processing options (downscale, contrast,
    median filter, sharpening, exposure compensation) as node parameters.

    Every declared attribute is forwarded to the executable through the
    {allParams} placeholder in the command line.
    """
    commandLine = 'aliceVision_utils_imageProcessing {allParams}'
    # Node size is derived dynamically from the 'input' attribute.
    size = desc.DynamicNodeSize('input')
    # Chunked parallelization is currently disabled (commented out); kept for reference.
    # parallelization = desc.Parallelization(blockSize=40)
    # commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'
    inputs = [
        # Source SfMData describing the views to process.
        desc.File(
            name='input',
            label='Input',
            description='SfMData file.',
            value='',
            uid=[0],
        ),
        # Output file extension; '' presumably keeps the original format — TODO confirm.
        desc.ChoiceParam(
            name='extension',
            label='File Extension',
            description='File Extension.',
            value='',
            values=['', 'exr', 'jpg', 'tiff', 'png'],
            exclusive=True,
            uid=[0],
        ),
        # Restrict processing to views that were reconstructed.
        desc.BoolParam(
            name='reconstructedViewsOnly',
            label='Only Reconstructed Views',
            description='Process Only Reconstructed Views',
            value=False,
            uid=[0],
        ),
        desc.BoolParam(
            name='exposureCompensation',
            label='Exposure Compensation',
            description='Exposure Compensation',
            value=False,
            uid=[0],
        ),
        # NOTE(review): range caps downscale at 1.0 — confirm upscaling is intentionally excluded.
        desc.FloatParam(
            name='downscale',
            label='Downscale',
            description='Downscale.',
            value=1.0,
            range=(0.0, 1.0, 0.01),
            uid=[0],
        ),
        desc.FloatParam(
            name='contrast',
            label='Contrast',
            description='Contrast.',
            value=1.0,
            range=(0.0, 100.0, 0.1),
            uid=[0],
        ),
        # Median filter window size; 0 presumably disables the filter — TODO confirm.
        desc.IntParam(
            name='medianFilter',
            label='Median Filter',
            description='Median Filter.',
            value=0,
            range=(0, 10, 1),
            uid=[0],
        ),
        # Sharpening kernel width; step of 2 keeps the value odd.
        desc.IntParam(
            name='sharpenWidth',
            label='Sharpen Width',
            description='Sharpen Width.',
            value=1,
            range=(1, 9, 2),
            uid=[0],
        ),
        desc.FloatParam(
            name='sharpenContrast',
            label='Sharpen Contrast',
            description='Sharpen Contrast.',
            value=1.0,
            range=(0.0, 100.0, 0.1),
            uid=[0],
        ),
        desc.FloatParam(
            name='sharpenThreshold',
            label='Sharpen Threshold',
            description='Sharpen Threshold.',
            value=0.0,
            range=(0.0, 1.0, 0.01),
            uid=[0],
        ),
        # Log verbosity; uid=[] so changing it does not invalidate computed results.
        desc.ChoiceParam(
            name='verboseLevel',
            label='Verbose Level',
            description='verbosity level (fatal, error, warning, info, debug, trace).',
            value='info',
            values=['fatal', 'error', 'warning', 'info', 'debug', 'trace'],
            exclusive=True,
            uid=[],
        )
    ]
    outputs = [
        # Resulting SfMData, written inside the node's internal folder.
        desc.File(
            name='outSfMData',
            label='Output sfmData',
            description='Output sfmData.',
            value=desc.Node.internalFolder + 'sfmData.abc',
            uid=[],
        ),
    ]

View file

@@ -74,7 +74,7 @@ for now, the parameters are difficult to control and vary a lot from one dataset
label='Mesh Update Method', label='Mesh Update Method',
description='Mesh Update Method\n' description='Mesh Update Method\n'
' * ITERATIVE_UPDATE (default): ShapeUp styled iterative solver \n' ' * ITERATIVE_UPDATE (default): ShapeUp styled iterative solver \n'
' * POISSON_UPDATE: Poisson-based update from [Want et al. 2015]', ' * POISSON_UPDATE: Poisson-based update from [Wang et al. 2015] "Rolling guidance normal filter for geometric processing"',
value=0, value=0,
values=(0, 1), values=(0, 1),
exclusive=True, exclusive=True,

View file

@@ -55,27 +55,29 @@ This node allows to copy files into a specific folder.
return paths return paths
def processChunk(self, chunk): def processChunk(self, chunk):
chunk.logManager.waitUntilCleared() try:
chunk.logger.setLevel(chunk.logManager.textToLevel(chunk.node.verboseLevel.value)) chunk.logManager.start(chunk.node.verboseLevel.value)
if not chunk.node.inputFiles: if not chunk.node.inputFiles:
chunk.logger.warning('Nothing to publish') chunk.logger.warning('Nothing to publish')
return return
if not chunk.node.output.value: if not chunk.node.output.value:
return return
outFiles = self.resolvedPaths(chunk.node.inputFiles.value, chunk.node.output.value) outFiles = self.resolvedPaths(chunk.node.inputFiles.value, chunk.node.output.value)
if not outFiles: if not outFiles:
error = 'Publish: input files listed, but nothing to publish' error = 'Publish: input files listed, but nothing to publish'
chunk.logger.error(error) chunk.logger.error(error)
chunk.logger.info('Listed input files: {}'.format([i.value for i in chunk.node.inputFiles.value])) chunk.logger.info('Listed input files: {}'.format([i.value for i in chunk.node.inputFiles.value]))
raise RuntimeError(error) raise RuntimeError(error)
if not os.path.exists(chunk.node.output.value): if not os.path.exists(chunk.node.output.value):
os.mkdir(chunk.node.output.value) os.mkdir(chunk.node.output.value)
for iFile, oFile in outFiles.items(): for iFile, oFile in outFiles.items():
chunk.logger.info('Publish file {} into {}'.format(iFile, oFile)) chunk.logger.info('Publish file {} into {}'.format(iFile, oFile))
shutil.copyfile(iFile, oFile) shutil.copyfile(iFile, oFile)
chunk.logger.info('Publish end') chunk.logger.info('Publish end')
finally:
chunk.logManager.end()

View file

@@ -221,31 +221,31 @@ Upload a textured mesh on Sketchfab.
return self._stopped return self._stopped
def processChunk(self, chunk): def processChunk(self, chunk):
self._stopped = False
chunk.logManager.waitUntilCleared()
chunk.logger.setLevel(chunk.logManager.textToLevel(chunk.node.verboseLevel.value))
if not chunk.node.inputFiles:
chunk.logger.warning('Nothing to upload')
return
if chunk.node.apiToken.value == '':
chunk.logger.error('Need API token.')
raise RuntimeError()
if len(chunk.node.title.value) > 48:
chunk.logger.error('Title cannot be longer than 48 characters.')
raise RuntimeError()
if len(chunk.node.description.value) > 1024:
chunk.logger.error('Description cannot be longer than 1024 characters.')
raise RuntimeError()
tags = [ i.value.replace(' ', '-') for i in chunk.node.tags.value.values() ]
if all(len(i) > 48 for i in tags) and len(tags) > 0:
chunk.logger.error('Tags cannot be longer than 48 characters.')
raise RuntimeError()
if len(tags) > 42:
chunk.logger.error('Maximum of 42 separate tags.')
raise RuntimeError()
try: try:
self._stopped = False
chunk.logManager.start(chunk.node.verboseLevel.value)
uploadFile = ''
if not chunk.node.inputFiles:
chunk.logger.warning('Nothing to upload')
return
if chunk.node.apiToken.value == '':
chunk.logger.error('Need API token.')
raise RuntimeError()
if len(chunk.node.title.value) > 48:
chunk.logger.error('Title cannot be longer than 48 characters.')
raise RuntimeError()
if len(chunk.node.description.value) > 1024:
chunk.logger.error('Description cannot be longer than 1024 characters.')
raise RuntimeError()
tags = [ i.value.replace(' ', '-') for i in chunk.node.tags.value.values() ]
if all(len(i) > 48 for i in tags) and len(tags) > 0:
chunk.logger.error('Tags cannot be longer than 48 characters.')
raise RuntimeError()
if len(tags) > 42:
chunk.logger.error('Maximum of 42 separate tags.')
raise RuntimeError()
data = { data = {
'name': chunk.node.title.value, 'name': chunk.node.title.value,
'description': chunk.node.description.value, 'description': chunk.node.description.value,
@@ -281,5 +281,7 @@ Upload a textured mesh on Sketchfab.
os.remove(uploadFile) os.remove(uploadFile)
chunk.logger.debug('Deleted {}'.format(uploadFile)) chunk.logger.debug('Deleted {}'.format(uploadFile))
chunk.logManager.end()
def stopProcess(self, chunk): def stopProcess(self, chunk):
self._stopped = True self._stopped = True

View file

@@ -115,6 +115,18 @@ It iterates like that, adding cameras and triangulating new 2D features into 3D
uid=[0], uid=[0],
advanced=True, advanced=True,
), ),
desc.ChoiceParam(
name='observationConstraint',
label='Observation Constraint',
description='Observation contraint mode used in the optimization:\n'
' * Basic: Use standard reprojection error in pixel coordinates\n'
' * Scale: Use reprojection error in pixel coordinates but relative to the feature scale',
value='Basic',
values=['Basic', 'Scale'],
exclusive=True,
uid=[0],
advanced=True,
),
desc.IntParam( desc.IntParam(
name='localizerEstimatorMaxIterations', name='localizerEstimatorMaxIterations',
label='Localizer Max Ransac Iterations', label='Localizer Max Ransac Iterations',

View file

@@ -41,6 +41,9 @@ class PlatformExecutable(Executable):
build_exe_options = { build_exe_options = {
# include dynamically loaded plugins # include dynamically loaded plugins
"packages": ["meshroom.nodes", "meshroom.submitters"], "packages": ["meshroom.nodes", "meshroom.submitters"],
"includes": [
"idna.idnadata", # Dependency needed by SketchfabUpload node, but not detected by cx_Freeze
],
"include_files": ["CHANGES.md", "COPYING.md", "LICENSE-MPL2.md", "README.md"] "include_files": ["CHANGES.md", "COPYING.md", "LICENSE-MPL2.md", "README.md"]
} }