#!/usr/bin/env python
import argparse
import os
import sys
import distutils.util
import json
import re
import logging

from meshroom import setupEnvironment
setupEnvironment()

import meshroom.core.graph

meshroom.core.initPipelines()

parser = argparse.ArgumentParser(
    prog='meshroom_batch',
    description='Launch a Meshroom pipeline from command line.',
    add_help=True,
    formatter_class=argparse.RawTextHelpFormatter,
    epilog='''
Examples:
1. Process a pipeline in command line:
meshroom_batch -p cameraTracking -i /input/path -o /output/path -s /path/to/store/the/project.mg

2. Submit a pipeline on renderfarm:
meshroom_batch -p cameraTracking -i /input/path -o /output/path -s /path/to/store/the/project.mg --submit

See "meshroom_compute -h" to compute an existing project from command line.

Additional Resources:
Website:    https://alicevision.org
Manual:     https://meshroom-manual.readthedocs.io
Forum:      https://groups.google.com/g/alicevision
Tutorials:  https://www.youtube.com/c/AliceVisionOrg
Contribute: https://github.com/alicevision/Meshroom
''')

general_group = parser.add_argument_group('General Options')

general_group.add_argument('-i', '--input', metavar='NODEINSTANCE:"SFM/FOLDERS/IMAGES;..."',
                           type=str, nargs='*', default=[],
                           help='Input folder containing images or folders of images or file (.sfm or .json) '
                                'with images paths and optionally predefined camera intrinsics.')

general_group.add_argument('-I', '--inputRecursive', metavar='NODEINSTANCE:"FOLDERS/IMAGES;..."',
                           type=str, nargs='*', default=[],
                           help='Input folders containing all images recursively.')
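# Note on -i/-I values: each entry may optionally be prefixed with the name of the CameraInit node
# instance it feeds, and several paths can be chained with ';'. For illustration only (the node
# name and paths are assumed): -i 'CameraInit_1:/data/setA;/data/setB'. Without a prefix, the
# pipeline's single CameraInit node is used (see parseInputs below).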
general_group.add_argument('-p', '--pipeline', metavar='FILE.mg / PIPELINE', type=str,
                           default=os.environ.get('MESHROOM_DEFAULT_PIPELINE', 'photogrammetry'),
                           help='Template pipeline among those listed or a Meshroom file containing a custom pipeline to run on input images:\n' +
                                '\n'.join([' - ' + p for p in meshroom.core.pipelineTemplates]) +
                                '\nRequirements: the graph must contain one CameraInit node, and one Publish node if --output is set.')

general_group.add_argument('-o', '--output', metavar='FOLDER', type=str, required=False,
                           help='Output folder where results should be copied to. '
                                'If not set, results will have to be retrieved directly from the cache folder.')

general_group.add_argument('-s', '--save', metavar='FILE', type=str, required=False,
                           help='Save the configured Meshroom graph to a project file. '
                                'It will set up the cache folder accordingly if not explicitly changed by --cache.')

general_group.add_argument('--submit', help='Submit on renderfarm instead of local computation.',
                           action='store_true')

general_group.add_argument('-v', '--verbose',
                           help='Set the verbosity level for logging:\n'
                                ' - fatal: Show only critical errors.\n'
                                ' - error: Show errors only.\n'
                                ' - warning: Show warnings and errors.\n'
                                ' - info: Show standard informational messages.\n'
                                ' - debug: Show detailed debug information.\n'
                                ' - trace: Show all messages, including trace-level details.',
                           default=os.environ.get('MESHROOM_VERBOSE', 'warning'),
                           choices=['fatal', 'error', 'warning', 'info', 'debug', 'trace'])

advanced_group = parser.add_argument_group('Advanced Options')

advanced_group.add_argument('--overrides', metavar='SETTINGS', type=str, default=None,
                            help='A JSON file containing the graph parameters override.')

advanced_group.add_argument('--paramOverrides', metavar='NODETYPE:param=value NODEINSTANCE.param=value',
                            type=str, default=None, nargs='*',
                            help='Override specific parameters directly from the command line (by node type or by node names).')
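# For illustration only (the node names and attributes below are assumptions, not guaranteed to
# exist in every pipeline): --paramOverrides accepts entries such as
#   FeatureExtraction:describerPreset=high      (applied to every node of that type)
#   FeatureExtraction_1.describerPreset=high    (applied to one node instance)
# and --overrides expects a JSON file shaped like
#   {"FeatureExtraction_1": {"describerPreset": "high"}, "DepthMap_1": {"downscale": 4}}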
advanced_group.add_argument('--cache', metavar='FOLDER', type=str, default=None,
                            help='Custom cache folder to write computation results. '
                                 'If not set, the default cache folder will be used: ' + meshroom.core.defaultCacheFolder)

advanced_group.add_argument('--compute', metavar='<yes/no>', type=lambda x: bool(distutils.util.strtobool(x)),
                            default=True, required=False,
                            help='You can set it to <no/false/0> to disable the computation.')

advanced_group.add_argument('--toNode', metavar='NODE', type=str, nargs='*', default=None,
                            help='Process the node(s) with its dependencies.')

advanced_group.add_argument('--forceStatus', help='Force computation if status is RUNNING or SUBMITTED.',
                            action='store_true')

advanced_group.add_argument('--forceCompute', help='Compute in all cases even if already computed.',
                            action='store_true')

advanced_group.add_argument("--submitLabel", type=str,
                            default=os.environ.get('MESHROOM_SUBMIT_LABEL', '[Meshroom] {projectName}'),
                            help="Label of a node when submitted on renderfarm.")

advanced_group.add_argument('--submitter', type=str, default='SimpleFarm',
                            help='Execute job with a specific submitter.')

args = parser.parse_args()

logStringToPython = {
    'fatal': logging.FATAL,
    'error': logging.ERROR,
    'warning': logging.WARNING,
    'info': logging.INFO,
    'debug': logging.DEBUG,
    'trace': logging.DEBUG,
}
logging.getLogger().setLevel(logStringToPython[args.verbose])

if not args.input and not args.inputRecursive:
    print('Nothing to compute. You need to set --input or --inputRecursive.')
    sys.exit(1)

meshroom.core.initNodes()

graph = meshroom.core.graph.Graph(name=args.pipeline)

with meshroom.core.graph.GraphModification(graph):
    # initialize template pipeline
    loweredPipelineTemplates = dict((k.lower(), v) for k, v in meshroom.core.pipelineTemplates.items())
    if args.pipeline.lower() in loweredPipelineTemplates:
        graph.initFromTemplate(loweredPipelineTemplates[args.pipeline.lower()], publishOutputs=True if args.output else False)
    else:
        # custom pipeline
        graph.initFromTemplate(args.pipeline, publishOutputs=True if args.output else False)

    def parseInputs(inputs, uniqueInitNode):
        """Utility method for parsing the input and inputRecursive arguments."""
        mapInputs = {}
        for inp in inputs:
            # Stop after the first occurrence to handle multiple input paths on Windows platforms
            inputGroup = inp.split(':', 1)
            nodeName = None
            if len(inputGroup) == 1 and uniqueInitNode:
                nodeName = uniqueInitNode.getName()
            elif len(inputGroup) == 2:
                # If inputGroup[0] is a valid node name, use it as such.
                if inputGroup[0] in graph.toDict().keys():
                    nodeName = inputGroup[0]
                # Otherwise, the platform might be Windows and inputGroup[0] is part of the input path:
                # in that case, we should behave as if len(inputGroup) == 1, use the uniqueInitNode's name
                # and re-join the path together.
                elif uniqueInitNode:
                    nodeName = uniqueInitNode.getName()
                    inputGroup = [":".join(inputGroup)]
                else:
                    print('Syntax error in input argument')
                    sys.exit(1)
            # If a node name has been provided and the platform is Windows, the length of inputGroup might be
            # 3: first the node name, then the first part of the input path, then the rest of the input path.
            # The input path should also be re-joined here.
            elif len(inputGroup) == 3:
                nodeName = inputGroup[0]
                inputGroup[1] = ":".join(inputGroup[1:])
                inputGroup.pop(-1)
            else:
                print('Syntax error in input argument')
                sys.exit(1)
            nodeInputs = inputGroup[-1].split(';')
            mapInputs[nodeName] = [os.path.abspath(path) for path in nodeInputs]
        return mapInputs
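    # For illustration only (hypothetical node name and paths): with a graph containing a node named
    # 'CameraInit_1', parseInputs(['CameraInit_1:/data/a;/data/b'], None) returns
    # {'CameraInit_1': ['/data/a', '/data/b']} (as absolute paths), while on Windows
    # parseInputs(['C:/data/a'], uniqueInitNode) re-joins the drive letter and maps the unique init
    # node's name to that single path.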
    # get init nodes
    initNodes = graph.findInitNodes()
    uniqueInitNode = initNodes[0] if (len(initNodes) == 1) else None

    # parse inputs for each init node
    mapInput = parseInputs(args.input, uniqueInitNode)
    # parse recursive inputs for each init node
    mapInputRecursive = parseInputs(args.inputRecursive, uniqueInitNode)

    # feed inputs and recursive inputs to init nodes
    for initNode in initNodes:
        nodeName = initNode.getName()
        inp = mapInput.get(nodeName)
        inpRec = mapInputRecursive.get(nodeName)
        if not inp and not inpRec:
            print(f'Nothing to compute for node {nodeName}.')
            sys.exit(1)
        initNode.nodeDesc.initialize(initNode, inp, inpRec)

    if not graph.canComputeLeaves:
        raise RuntimeError("Graph cannot be computed. Check for compatibility issues.")

    if args.verbose:
        graph.setVerbose(args.verbose)

    if args.output:
        # if there is more than 1 Publish node, they will all be set to the same output;
        # depending on what they are connected to, some input files might be overwritten in the output folder
        # (e.g. if two Publish nodes are connected to two Texturing nodes)
        publishNodes = graph.nodesOfType('Publish')
        if len(publishNodes) > 0:
            for node in publishNodes:
                node.output.value = os.path.abspath(args.output)
        else:
            raise RuntimeError("meshroom_batch requires a pipeline graph with at least one Publish node, none found.")

if args.overrides:
    with open(args.overrides, 'r', encoding='utf-8', errors='ignore') as f:
        data = json.load(f)
        for nodeName, overrides in data.items():
            for attrName, value in overrides.items():
                graph.findNode(nodeName).attribute(attrName).value = value

if args.paramOverrides:
    print("\n")
    reExtract = re.compile(r'(\w+)([:.])(\w[\w.]*)=(.*)')
    for p in args.paramOverrides:
        result = reExtract.match(p)
        if not result:
            raise ValueError('Invalid param override: ' + str(p))
        node, t, param, value = result.groups()
        if t == ':':
            nodesOfType = graph.nodesOfType(node)
            if not nodesOfType:
                raise ValueError('No node with the type "{}" in the scene.'.format(node))
            for n in nodesOfType:
                print('Overrides {node}.{param}={value}'.format(node=node, param=param, value=value))
                n.attribute(param).value = value
        elif t == '.':
            print('Overrides {node}.{param}={value}'.format(node=node, param=param, value=value))
            graph.findNode(node).attribute(param).value = value
        else:
            raise ValueError('Invalid param override: ' + str(p))
    print("\n")

# setup cache directory
graph.cacheDir = args.cache if args.cache else meshroom.core.defaultCacheFolder

if args.save:
    graph.save(args.save, setupProjectFile=not bool(args.cache))
    print('File successfully saved: "{}"'.format(args.save))

if not args.output:
    print('No output set, results will be available in the cache folder: "{}"'.format(graph.cacheDir))

# find end nodes (None will compute all graph)
toNodes = graph.findNodes(args.toNode) if args.toNode else None

if args.submit:
    meshroom.core.initSubmitters()
    if not args.save:
        raise ValueError('Need to save the project to file to submit on renderfarm.')
    # submit on renderfarm
    meshroom.core.graph.submit(args.save, args.submitter, toNode=args.toNode, submitLabel=args.submitLabel)
elif args.compute:
    # start computation
    meshroom.core.graph.executeGraph(graph, toNodes=toNodes, forceCompute=args.forceCompute, forceStatus=args.forceStatus)
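# For reference, a combined invocation might look like this (paths, pipeline and node names are
# illustrative, not prescriptive):
#   meshroom_batch -p photogrammetry -I /data/shoot -o /data/out -s /data/shoot.mg \
#       --paramOverrides FeatureExtraction:describerPreset=high --toNode Texturing_1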