mirror of
https://github.com/alicevision/Meshroom.git
synced 2025-06-05 12:21:59 +02:00
Merge remote-tracking branch 'origin/develop' into dev/panoramaFisheye
This commit is contained in:
commit
2f17e9aa80
7 changed files with 185 additions and 65 deletions
|
@ -132,9 +132,6 @@ class LogManager:
|
|||
|
||||
def __init__(self, chunk):
    """Attach a log manager to a node *chunk*.

    The manager owns a dedicated logger named after the chunk's node and
    re-clears the log file whenever the chunk's status changes.
    """
    self.chunk = chunk
    # Re-clear the log whenever the chunk's status changes.
    self.chunk.statusChanged.connect(self.clear)
    # State flags: whether the log has been cleared for the current run,
    # and whether a progress bar is currently active.
    self.cleared = False
    self.progressBar = False
    self.logger = logging.getLogger(chunk.node.getName())
||||
class Formatter(logging.Formatter):
|
||||
|
@ -151,27 +148,22 @@ class LogManager:
|
|||
handler.setFormatter(formatter)
|
||||
self.logger.addHandler(handler)
|
||||
|
||||
def clear(self):
    """React to a chunk status change by resetting the log state.

    While the chunk is RUNNING (and not yet cleared) the log file is
    truncated and the logger re-configured; once the chunk reaches a
    terminal state the file handlers are closed so the file is unlocked.
    """
    status = self.chunk.statusName
    if status == 'RUNNING' and not self.cleared:
        # Truncate the log file and attach a fresh handler.
        with open(self.chunk.logFile, 'w'):
            pass
        self.configureLogger()
        self.cleared = True
    elif status in ('ERROR', 'SUCCESS'):
        # When the node gets run again the log needs to be cleared.
        for handler in list(self.logger.handlers):
            # Closing stops the file from being locked.
            handler.close()
        self.cleared = False
        self.progressBar = False
def start(self, level):
    """Begin a logging session at the given textual *level*.

    Truncates any previous log content, re-configures the logger and
    resets the progress-bar state.
    """
    # Empty the chunk's log file before writing anything new.
    with open(self.chunk.logFile, 'w'):
        pass

    self.configureLogger()
    self.logger.setLevel(self.textToLevel(level))
    self.progressBar = False
def waitUntilCleared(self):
    """Busy-wait (with a short sleep) until the log has been cleared."""
    while True:
        if self.cleared:
            break
        time.sleep(0.01)
def end(self):
    """Terminate the logging session by closing every attached handler."""
    for handler in list(self.logger.handlers):
        # Closing stops the log file from being locked.
        handler.close()
def makeProgressBar(self, end, message=''):
|
||||
assert end > 0
|
||||
assert not self.progressBar
|
||||
self.waitUntilCleared()
|
||||
|
||||
self.progressEnd = end
|
||||
self.currentProgressTics = 0
|
||||
|
@ -194,7 +186,6 @@ class LogManager:
|
|||
def updateProgressBar(self, value):
|
||||
assert self.progressBar
|
||||
assert value <= self.progressEnd
|
||||
self.waitUntilCleared()
|
||||
|
||||
tics = round((value/self.progressEnd)*51)
|
||||
|
||||
|
|
110
meshroom/nodes/aliceVision/ImageProcessing.py
Normal file
110
meshroom/nodes/aliceVision/ImageProcessing.py
Normal file
|
@ -0,0 +1,110 @@
|
|||
__version__ = "1.1"
|
||||
|
||||
from meshroom.core import desc
|
||||
|
||||
|
||||
class ImageProcessing(desc.CommandLineNode):
    """Node wrapping the ``aliceVision_utils_imageProcessing`` command line.

    Applies basic image processing (downscale, contrast, median filter,
    sharpening, ...) to the views of an SfMData file.
    """
    commandLine = 'aliceVision_utils_imageProcessing {allParams}'
    size = desc.DynamicNodeSize('input')
    # Parallelization is kept disabled for now:
    # parallelization = desc.Parallelization(blockSize=40)
    # commandLineRange = '--rangeStart {rangeStart} --rangeSize {rangeBlockSize}'

    inputs = [
        desc.File(
            name='input',
            label='Input',
            description='SfMData file.',
            value='',
            uid=[0],
        ),
        desc.ChoiceParam(
            name='extension',
            label='File Extension',
            description='File Extension.',
            value='',
            values=['', 'exr', 'jpg', 'tiff', 'png'],
            exclusive=True,
            uid=[0],
        ),
        desc.BoolParam(
            name='reconstructedViewsOnly',
            label='Only Reconstructed Views',
            description='Process Only Reconstructed Views',
            value=False,
            uid=[0],
        ),
        desc.BoolParam(
            name='exposureCompensation',
            label='Exposure Compensation',
            description='Exposure Compensation',
            value=False,
            uid=[0],
        ),
        desc.FloatParam(
            name='downscale',
            label='Downscale',
            description='Downscale.',
            value=1.0,
            range=(0.0, 1.0, 0.01),
            uid=[0],
        ),
        desc.FloatParam(
            name='contrast',
            label='Contrast',
            description='Contrast.',
            value=1.0,
            range=(0.0, 100.0, 0.1),
            uid=[0],
        ),
        desc.IntParam(
            name='medianFilter',
            label='Median Filter',
            description='Median Filter.',
            value=0,
            range=(0, 10, 1),
            uid=[0],
        ),
        desc.IntParam(
            name='sharpenWidth',
            label='Sharpen Width',
            description='Sharpen Width.',
            value=1,
            # Step of 2 keeps the kernel width odd — TODO confirm intent.
            range=(1, 9, 2),
            uid=[0],
        ),
        desc.FloatParam(
            name='sharpenContrast',
            label='Sharpen Contrast',
            description='Sharpen Contrast.',
            value=1.0,
            range=(0.0, 100.0, 0.1),
            uid=[0],
        ),
        desc.FloatParam(
            name='sharpenThreshold',
            label='Sharpen Threshold',
            description='Sharpen Threshold.',
            value=0.0,
            range=(0.0, 1.0, 0.01),
            uid=[0],
        ),
        desc.ChoiceParam(
            name='verboseLevel',
            label='Verbose Level',
            description='verbosity level (fatal, error, warning, info, debug, trace).',
            value='info',
            values=['fatal', 'error', 'warning', 'info', 'debug', 'trace'],
            exclusive=True,
            # Verbosity does not affect the output content, hence no uid.
            uid=[],
        )
    ]

    outputs = [
        desc.File(
            name='outSfMData',
            label='Output sfmData',
            description='Output sfmData.',
            value=desc.Node.internalFolder + 'sfmData.abc',
            uid=[],
        ),
    ]
|
@ -74,7 +74,7 @@ for now, the parameters are difficult to control and vary a lot from one dataset
|
|||
label='Mesh Update Method',
|
||||
description='Mesh Update Method\n'
|
||||
' * ITERATIVE_UPDATE (default): ShapeUp styled iterative solver \n'
|
||||
' * POISSON_UPDATE: Poisson-based update from [Want et al. 2015]',
|
||||
' * POISSON_UPDATE: Poisson-based update from [Wang et al. 2015] "Rolling guidance normal filter for geometric processing"',
|
||||
value=0,
|
||||
values=(0, 1),
|
||||
exclusive=True,
|
||||
|
|
|
@ -55,27 +55,29 @@ This node allows to copy files into a specific folder.
|
|||
return paths
|
||||
|
||||
def processChunk(self, chunk):
    """Publish (copy) the node's input files into the output folder.

    The span in SOURCE interleaved the pre-merge and post-merge variants of
    this method; this is the post-merge version, where the whole body runs
    inside try/finally so the log manager is always stopped, even on error
    or early return.
    """
    try:
        chunk.logManager.start(chunk.node.verboseLevel.value)

        # Nothing listed at all: warn and bail out gracefully.
        if not chunk.node.inputFiles:
            chunk.logger.warning('Nothing to publish')
            return
        # No destination folder configured: silently do nothing.
        if not chunk.node.output.value:
            return

        outFiles = self.resolvedPaths(chunk.node.inputFiles.value, chunk.node.output.value)

        if not outFiles:
            # Inputs were listed but none resolved to a publishable file.
            error = 'Publish: input files listed, but nothing to publish'
            chunk.logger.error(error)
            chunk.logger.info('Listed input files: {}'.format([i.value for i in chunk.node.inputFiles.value]))
            raise RuntimeError(error)

        if not os.path.exists(chunk.node.output.value):
            os.mkdir(chunk.node.output.value)

        for iFile, oFile in outFiles.items():
            chunk.logger.info('Publish file {} into {}'.format(iFile, oFile))
            shutil.copyfile(iFile, oFile)
        chunk.logger.info('Publish end')
    finally:
        # Always release the log file handlers.
        chunk.logManager.end()
||||
|
|
|
@ -221,31 +221,31 @@ Upload a textured mesh on Sketchfab.
|
|||
return self._stopped
|
||||
|
||||
def processChunk(self, chunk):
|
||||
self._stopped = False
|
||||
chunk.logManager.waitUntilCleared()
|
||||
chunk.logger.setLevel(chunk.logManager.textToLevel(chunk.node.verboseLevel.value))
|
||||
|
||||
if not chunk.node.inputFiles:
|
||||
chunk.logger.warning('Nothing to upload')
|
||||
return
|
||||
if chunk.node.apiToken.value == '':
|
||||
chunk.logger.error('Need API token.')
|
||||
raise RuntimeError()
|
||||
if len(chunk.node.title.value) > 48:
|
||||
chunk.logger.error('Title cannot be longer than 48 characters.')
|
||||
raise RuntimeError()
|
||||
if len(chunk.node.description.value) > 1024:
|
||||
chunk.logger.error('Description cannot be longer than 1024 characters.')
|
||||
raise RuntimeError()
|
||||
tags = [ i.value.replace(' ', '-') for i in chunk.node.tags.value.values() ]
|
||||
if all(len(i) > 48 for i in tags) and len(tags) > 0:
|
||||
chunk.logger.error('Tags cannot be longer than 48 characters.')
|
||||
raise RuntimeError()
|
||||
if len(tags) > 42:
|
||||
chunk.logger.error('Maximum of 42 separate tags.')
|
||||
raise RuntimeError()
|
||||
|
||||
try:
|
||||
self._stopped = False
|
||||
chunk.logManager.start(chunk.node.verboseLevel.value)
|
||||
uploadFile = ''
|
||||
|
||||
if not chunk.node.inputFiles:
|
||||
chunk.logger.warning('Nothing to upload')
|
||||
return
|
||||
if chunk.node.apiToken.value == '':
|
||||
chunk.logger.error('Need API token.')
|
||||
raise RuntimeError()
|
||||
if len(chunk.node.title.value) > 48:
|
||||
chunk.logger.error('Title cannot be longer than 48 characters.')
|
||||
raise RuntimeError()
|
||||
if len(chunk.node.description.value) > 1024:
|
||||
chunk.logger.error('Description cannot be longer than 1024 characters.')
|
||||
raise RuntimeError()
|
||||
tags = [ i.value.replace(' ', '-') for i in chunk.node.tags.value.values() ]
|
||||
if all(len(i) > 48 for i in tags) and len(tags) > 0:
|
||||
chunk.logger.error('Tags cannot be longer than 48 characters.')
|
||||
raise RuntimeError()
|
||||
if len(tags) > 42:
|
||||
chunk.logger.error('Maximum of 42 separate tags.')
|
||||
raise RuntimeError()
|
||||
|
||||
data = {
|
||||
'name': chunk.node.title.value,
|
||||
'description': chunk.node.description.value,
|
||||
|
@ -281,5 +281,7 @@ Upload a textured mesh on Sketchfab.
|
|||
os.remove(uploadFile)
|
||||
chunk.logger.debug('Deleted {}'.format(uploadFile))
|
||||
|
||||
chunk.logManager.end()
|
||||
|
||||
def stopProcess(self, chunk):
|
||||
self._stopped = True
|
||||
|
|
|
@ -115,6 +115,18 @@ It iterates like that, adding cameras and triangulating new 2D features into 3D
|
|||
uid=[0],
|
||||
advanced=True,
|
||||
),
|
||||
desc.ChoiceParam(
    name='observationConstraint',
    label='Observation Constraint',
    # Fixed typo in the user-facing description: "contraint" -> "constraint".
    description='Observation constraint mode used in the optimization:\n'
                ' * Basic: Use standard reprojection error in pixel coordinates\n'
                ' * Scale: Use reprojection error in pixel coordinates but relative to the feature scale',
    value='Basic',
    values=['Basic', 'Scale'],
    exclusive=True,
    uid=[0],
    advanced=True,
),
||||
desc.IntParam(
|
||||
name='localizerEstimatorMaxIterations',
|
||||
label='Localizer Max Ransac Iterations',
|
||||
|
|
3
setup.py
3
setup.py
|
@ -41,6 +41,9 @@ class PlatformExecutable(Executable):
|
|||
build_exe_options = {
    # Include dynamically loaded plugins so cx_Freeze packages them.
    "packages": ["meshroom.nodes", "meshroom.submitters"],
    "includes": [
        # Dependency needed by SketchfabUpload node, but not detected by cx_Freeze.
        "idna.idnadata",
    ],
    # Ship top-level documentation alongside the frozen executable.
    "include_files": ["CHANGES.md", "COPYING.md", "LICENSE-MPL2.md", "README.md"]
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue