mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

commit 2b951d29f2 (parent fd64e261df)
Maya: Tweak logging for artist-facing reports in publisher

57 changed files with 217 additions and 227 deletions
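The pattern applied across these files is uniform: chatty, developer-oriented messages move from `self.log.info(...)` to `self.log.debug(...)` so the publisher's artist-facing report surfaces only actionable information, while debug output remains available for troubleshooting. A minimal sketch of the convention as a pyblish plugin follows; the `CollectExampleNodes` class and its `exampleNodes` data key are hypothetical illustrations, not part of this commit:

    import pyblish.api


    class CollectExampleNodes(pyblish.api.InstancePlugin):
        """Hypothetical collector illustrating the logging convention."""

        order = pyblish.api.CollectorOrder
        label = "Collect Example Nodes"

        def process(self, instance):
            nodes = instance[:]

            # Verbose, developer-facing detail: log at debug so it does
            # not clutter the report artists read after publishing.
            self.log.debug("Collected nodes: {}".format(nodes))

            # Actionable, artist-facing information stays at info.
            if not nodes:
                self.log.info("Instance '%s' is empty; nothing to collect."
                              % instance.name)

            instance.data["exampleNodes"] = nodes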
@@ -35,14 +35,11 @@ class CollectAssembly(pyblish.api.InstancePlugin):
         # Get all content from the instance
         instance_lookup = set(cmds.ls(instance, type="transform", long=True))
         data = defaultdict(list)
-        self.log.info(instance_lookup)

         hierarchy_nodes = []
         for container in containers:

-            self.log.info(container)
             root = lib.get_container_transforms(container, root=True)
-            self.log.info(root)
             if not root or root not in instance_lookup:
                 continue

@@ -356,8 +356,9 @@ class CollectLook(pyblish.api.InstancePlugin):
         # Thus the data will be limited to only what we need.
         self.log.debug("obj_set {}".format(sets[obj_set]))
         if not sets[obj_set]["members"]:
-            self.log.info(
-                "Removing redundant set information: {}".format(obj_set))
+            self.log.debug(
+                "Removing redundant set information: {}".format(obj_set)
+            )
             sets.pop(obj_set, None)

         self.log.debug("Gathering attribute changes to instance members..")

@@ -396,9 +397,9 @@ class CollectLook(pyblish.api.InstancePlugin):
             if con:
                 materials.extend(con)

-        self.log.info("Found materials:\n{}".format(materials))
+        self.log.debug("Found materials:\n{}".format(materials))

-        self.log.info("Found the following sets:\n{}".format(look_sets))
+        self.log.debug("Found the following sets:\n{}".format(look_sets))
         # Get the entire node chain of the look sets
         # history = cmds.listHistory(look_sets)
         history = []

@@ -456,7 +457,7 @@ class CollectLook(pyblish.api.InstancePlugin):
         instance.extend(shader for shader in look_sets if shader
                         not in instance_lookup)

-        self.log.info("Collected look for %s" % instance)
+        self.log.debug("Collected look for %s" % instance)

     def collect_sets(self, instance):
         """Collect all objectSets which are of importance for publishing

@@ -593,7 +594,7 @@ class CollectLook(pyblish.api.InstancePlugin):
         if attribute == "fileTextureName":
             computed_attribute = node + ".computedFileTextureNamePattern"

-        self.log.info(" - file source: {}".format(source))
+        self.log.debug(" - file source: {}".format(source))
         color_space_attr = "{}.colorSpace".format(node)
         try:
             color_space = cmds.getAttr(color_space_attr)

@@ -621,7 +622,7 @@ class CollectLook(pyblish.api.InstancePlugin):
                 dependNode=True)
             )
             if not source and cmds.nodeType(node) in pxr_nodes:
-                self.log.info("Renderman: source is empty, skipping...")
+                self.log.debug("Renderman: source is empty, skipping...")
                 continue
         # We replace backslashes with forward slashes because V-Ray
         # can't handle the UDIM files with the backslashes in the

@@ -630,14 +631,14 @@ class CollectLook(pyblish.api.InstancePlugin):

         files = get_file_node_files(node)
         if len(files) == 0:
-            self.log.error("No valid files found from node `%s`" % node)
+            self.log.debug("No valid files found from node `%s`" % node)

-        self.log.info("collection of resource done:")
-        self.log.info(" - node: {}".format(node))
-        self.log.info(" - attribute: {}".format(attribute))
-        self.log.info(" - source: {}".format(source))
-        self.log.info(" - file: {}".format(files))
-        self.log.info(" - color space: {}".format(color_space))
+        self.log.debug("collection of resource done:")
+        self.log.debug(" - node: {}".format(node))
+        self.log.debug(" - attribute: {}".format(attribute))
+        self.log.debug(" - source: {}".format(source))
+        self.log.debug(" - file: {}".format(files))
+        self.log.debug(" - color space: {}".format(color_space))

         # Define the resource
         yield {

@@ -268,7 +268,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
         cmds.loadPlugin("MultiverseForMaya", quiet=True)
         import multiverse

-        self.log.info("Processing mvLook for '{}'".format(instance))
+        self.log.debug("Processing mvLook for '{}'".format(instance))

         nodes = set()
         for node in instance:

@@ -287,7 +287,7 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
         publishMipMap = instance.data["publishMipMap"]

         for node in nodes:
-            self.log.info("Getting resources for '{}'".format(node))
+            self.log.debug("Getting resources for '{}'".format(node))

             # We know what nodes need to be collected, now we need to
             # extract the materials overrides.

@@ -380,12 +380,12 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
         if len(files) == 0:
             self.log.error("No valid files found from node `%s`" % node)

-        self.log.info("collection of resource done:")
-        self.log.info(" - node: {}".format(node))
-        self.log.info(" - attribute: {}".format(fname_attrib))
-        self.log.info(" - source: {}".format(source))
-        self.log.info(" - file: {}".format(files))
-        self.log.info(" - color space: {}".format(color_space))
+        self.log.debug("collection of resource done:")
+        self.log.debug(" - node: {}".format(node))
+        self.log.debug(" - attribute: {}".format(fname_attrib))
+        self.log.debug(" - source: {}".format(source))
+        self.log.debug(" - file: {}".format(files))
+        self.log.debug(" - color space: {}".format(color_space))

         # Define the resource
         resource = {"node": node,

@@ -406,14 +406,14 @@ class CollectMultiverseLookData(pyblish.api.InstancePlugin):
             extra_files = []
             self.log.debug("Expecting MipMaps, going to look for them.")
             for fname in files:
-                self.log.info("Checking '{}' for mipmaps".format(fname))
+                self.log.debug("Checking '{}' for mipmaps".format(fname))
                 if is_mipmap(fname):
                     self.log.debug(" - file is already MipMap, skipping.")
                     continue

                 mipmap = get_mipmap(fname)
                 if mipmap:
-                    self.log.info(" mipmap found for '{}'".format(fname))
+                    self.log.debug(" mipmap found for '{}'".format(fname))
                     extra_files.append(mipmap)
                 else:
                     self.log.warning(" no mipmap found for '{}'".format(fname))

@@ -105,7 +105,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
                     "family": cmds.getAttr("{}.family".format(s)),
                 }
             )
-            self.log.info(" -> attach render to: {}".format(s))
+            self.log.debug(" -> attach render to: {}".format(s))

         layer_name = layer.name()

@@ -137,10 +137,10 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
         has_cameras = any(product.camera for product in render_products)
         assert has_cameras, "No render cameras found."

-        self.log.info("multipart: {}".format(
+        self.log.debug("multipart: {}".format(
             multipart))
         assert expected_files, "no file names were generated, this is a bug"
-        self.log.info(
+        self.log.debug(
             "expected files: {}".format(
                 json.dumps(expected_files, indent=4, sort_keys=True)
             )

@@ -175,7 +175,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
             publish_meta_path = os.path.dirname(full_path)
             aov_dict[aov_first_key] = full_paths
         full_exp_files = [aov_dict]
-        self.log.info(full_exp_files)
+        self.log.debug(full_exp_files)

         if publish_meta_path is None:
             raise KnownPublishError("Unable to detect any expected output "

@@ -227,7 +227,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
         if platform.system().lower() in ["linux", "darwin"]:
             common_publish_meta_path = "/" + common_publish_meta_path

-        self.log.info(
+        self.log.debug(
             "Publish meta path: {}".format(common_publish_meta_path))

         # Get layer specific settings, might be overrides

@@ -300,7 +300,7 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
         )
         if rr_settings["enabled"]:
             data["rrPathName"] = instance.data.get("rrPathName")
-            self.log.info(data["rrPathName"])
+            self.log.debug(data["rrPathName"])

         if self.sync_workfile_version:
             data["version"] = context.data["version"]

@@ -37,7 +37,7 @@ class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):

         # Get renderer
         renderer = instance.data["renderer"]
-        self.log.info("Renderer found: {}".format(renderer))
+        self.log.debug("Renderer found: {}".format(renderer))

         rp_node_types = {"vray": ["VRayRenderElement", "VRayRenderElementSet"],
                          "arnold": ["aiAOV"],

@@ -66,8 +66,8 @@ class CollectRenderLayerAOVS(pyblish.api.InstancePlugin):

             result.append(render_pass)

-        self.log.info("Found {} render elements / AOVs for "
-                      "'{}'".format(len(result), instance.data["subset"]))
+        self.log.debug("Found {} render elements / AOVs for "
+                       "'{}'".format(len(result), instance.data["subset"]))

         instance.data["renderPasses"] = result

@@ -21,11 +21,12 @@ class CollectRenderableCamera(pyblish.api.InstancePlugin):
         else:
             layer = instance.data["renderlayer"]

-        self.log.info("layer: {}".format(layer))
         cameras = cmds.ls(type="camera", long=True)
-        renderable = [c for c in cameras if
-                      get_attr_in_layer("%s.renderable" % c, layer)]
+        renderable = [cam for cam in cameras if
+                      get_attr_in_layer("{}.renderable".format(cam), layer)]

-        self.log.info("Found cameras %s: %s" % (len(renderable), renderable))
+        self.log.debug(
+            "Found renderable cameras %s: %s", len(renderable), renderable
+        )

         instance.data["cameras"] = renderable

@@ -19,7 +19,7 @@ class CollectUnrealStaticMesh(pyblish.api.InstancePlugin):
         instance.data["geometryMembers"] = cmds.sets(
             geometry_set, query=True)

-        self.log.info("geometry: {}".format(
+        self.log.debug("geometry: {}".format(
             pformat(instance.data.get("geometryMembers"))))

         collision_set = [

@@ -29,7 +29,7 @@ class CollectUnrealStaticMesh(pyblish.api.InstancePlugin):
         instance.data["collisionMembers"] = cmds.sets(
             collision_set, query=True)

-        self.log.info("collisions: {}".format(
+        self.log.debug("collisions: {}".format(
             pformat(instance.data.get("collisionMembers"))))

         frame = cmds.currentTime(query=True)

@@ -67,5 +67,5 @@ class CollectXgen(pyblish.api.InstancePlugin):

         data["transfers"] = transfers

-        self.log.info(data)
+        self.log.debug(data)
         instance.data.update(data)

@@ -119,7 +119,6 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
         texture_filenames = []
         if image_search_paths:

-
             # TODO: Somehow this uses OS environment path separator, `:` vs `;`
             # Later on check whether this is pipeline OS cross-compatible.
             image_search_paths = [p for p in

@@ -130,13 +129,13 @@ class CollectYetiRig(pyblish.api.InstancePlugin):

         # List all related textures
         texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
-        self.log.info("Found %i texture(s)" % len(texture_filenames))
+        self.log.debug("Found %i texture(s)" % len(texture_filenames))

         # Get all reference nodes
         reference_nodes = cmds.pgYetiGraph(node,
                                            listNodes=True,
                                            type="reference")
-        self.log.info("Found %i reference node(s)" % len(reference_nodes))
+        self.log.debug("Found %i reference node(s)" % len(reference_nodes))

         if texture_filenames and not image_search_paths:
             raise ValueError("pgYetiMaya node '%s' is missing the path to the "

@@ -100,7 +100,7 @@ class ExtractArnoldSceneSource(publish.Extractor):

         instance.data["representations"].append(representation)

-        self.log.info(
+        self.log.debug(
             "Extracted instance {} to: {}".format(instance.name, staging_dir)
         )

@@ -126,7 +126,7 @@ class ExtractArnoldSceneSource(publish.Extractor):
         instance.data["representations"].append(representation)

     def _extract(self, nodes, attribute_data, kwargs):
-        self.log.info(
+        self.log.debug(
             "Writing {} with:\n{}".format(kwargs["filename"], kwargs)
         )
         filenames = []

@@ -180,12 +180,12 @@ class ExtractArnoldSceneSource(publish.Extractor):

         with lib.attribute_values(attribute_data):
             with lib.maintained_selection():
-                self.log.info(
+                self.log.debug(
                     "Writing: {}".format(duplicate_nodes)
                 )
                 cmds.select(duplicate_nodes, noExpand=True)

-                self.log.info(
+                self.log.debug(
                     "Extracting ass sequence with: {}".format(kwargs)
                 )

@@ -194,6 +194,6 @@ class ExtractArnoldSceneSource(publish.Extractor):
         for file in exported_files:
             filenames.append(os.path.split(file)[1])

-        self.log.info("Exported: {}".format(filenames))
+        self.log.debug("Exported: {}".format(filenames))

         return filenames, nodes_by_id

@@ -27,7 +27,7 @@ class ExtractAssembly(publish.Extractor):
         json_filename = "{}.json".format(instance.name)
         json_path = os.path.join(staging_dir, json_filename)

-        self.log.info("Dumping scene data for debugging ..")
+        self.log.debug("Dumping scene data for debugging ..")
         with open(json_path, "w") as filepath:
             json.dump(instance.data["scenedata"], filepath, ensure_ascii=False)

@@ -94,7 +94,7 @@ class ExtractCameraAlembic(publish.Extractor):
                 "Attributes to bake must be specified as a list"
             )
             for attr in self.bake_attributes:
-                self.log.info("Adding {} attribute".format(attr))
+                self.log.debug("Adding {} attribute".format(attr))
                 job_str += " -attr {0}".format(attr)

         with lib.evaluation("off"):

@@ -112,5 +112,5 @@ class ExtractCameraAlembic(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '{0}' to: {1}".format(
+        self.log.debug("Extracted instance '{0}' to: {1}".format(
             instance.name, path))

@@ -111,7 +111,7 @@ class ExtractCameraMayaScene(publish.Extractor):
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using {} as scene type".format(self.scene_type))
                 break
             except KeyError:

@@ -151,7 +151,7 @@ class ExtractCameraMayaScene(publish.Extractor):
         with lib.evaluation("off"):
             with lib.suspended_refresh():
                 if bake_to_worldspace:
-                    self.log.info(
+                    self.log.debug(
                         "Performing camera bakes: {}".format(transform))
                     baked = lib.bake_to_world_space(
                         transform,

@@ -186,7 +186,7 @@ class ExtractCameraMayaScene(publish.Extractor):
                     unlock(plug)
                     cmds.setAttr(plug, value)

-            self.log.info("Performing extraction..")
+            self.log.debug("Performing extraction..")
             cmds.select(cmds.ls(members, dag=True,
                                 shapes=True, long=True), noExpand=True)
             cmds.file(path,

@@ -217,5 +217,5 @@ class ExtractCameraMayaScene(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '{0}' to: {1}".format(
+        self.log.debug("Extracted instance '{0}' to: {1}".format(
             instance.name, path))

@@ -33,11 +33,11 @@ class ExtractFBX(publish.Extractor):
         # to format it into a string in a mel expression
         path = path.replace('\\', '/')

-        self.log.info("Extracting FBX to: {0}".format(path))
+        self.log.debug("Extracting FBX to: {0}".format(path))

         members = instance.data["setMembers"]
-        self.log.info("Members: {0}".format(members))
-        self.log.info("Instance: {0}".format(instance[:]))
+        self.log.debug("Members: {0}".format(members))
+        self.log.debug("Instance: {0}".format(instance[:]))

         fbx_exporter.set_options_from_instance(instance)

@@ -58,4 +58,4 @@ class ExtractFBX(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extract FBX successful to: {0}".format(path))
+        self.log.debug("Extract FBX successful to: {0}".format(path))

@@ -20,14 +20,10 @@ class ExtractGLB(publish.Extractor):
         filename = "{0}.glb".format(instance.name)
         path = os.path.join(staging_dir, filename)

-        self.log.info("Extracting GLB to: {}".format(path))
-
         cmds.loadPlugin("maya2glTF", quiet=True)

         nodes = instance[:]

-        self.log.info("Instance: {0}".format(nodes))
-
         start_frame = instance.data('frameStart') or \
             int(cmds.playbackOptions(query=True,
                                      animationStartTime=True))  # noqa

@@ -48,6 +44,7 @@ class ExtractGLB(publish.Extractor):
             "vno": True  # visibleNodeOnly
         }

+        self.log.debug("Extracting GLB to: {}".format(path))
         with lib.maintained_selection():
             cmds.select(nodes, hi=True, noExpand=True)
             extract_gltf(staging_dir,

@@ -65,4 +62,4 @@ class ExtractGLB(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extract GLB successful to: {0}".format(path))
+        self.log.debug("Extract GLB successful to: {0}".format(path))

@@ -60,6 +60,6 @@ class ExtractGPUCache(publish.Extractor):

         instance.data["representations"].append(representation)

-        self.log.info(
+        self.log.debug(
             "Extracted instance {} to: {}".format(instance.name, staging_dir)
         )

@@ -46,7 +46,7 @@ class ExtractImportReference(publish.Extractor,
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using {} as scene type".format(self.scene_type))
                 break

@@ -69,7 +69,7 @@ class ExtractImportReference(publish.Extractor,
         reference_path = os.path.join(dir_path, ref_scene_name)
         tmp_path = os.path.dirname(current_name) + "/" + ref_scene_name

-        self.log.info("Performing extraction..")
+        self.log.debug("Performing extraction..")

         # This generates script for mayapy to take care of reference
         # importing outside current session. It is passing current scene

@@ -111,7 +111,7 @@ print("*** Done")
         # process until handles are closed by context manager.
         with tempfile.TemporaryDirectory() as tmp_dir_name:
             tmp_script_path = os.path.join(tmp_dir_name, "import_ref.py")
-            self.log.info("Using script file: {}".format(tmp_script_path))
+            self.log.debug("Using script file: {}".format(tmp_script_path))
             with open(tmp_script_path, "wt") as tmp:
                 tmp.write(script)

@@ -149,9 +149,9 @@ print("*** Done")
             "stagingDir": os.path.dirname(current_name),
             "outputName": "imported"
         }
-        self.log.info("%s" % ref_representation)
+        self.log.debug(ref_representation)

         instance.data["representations"].append(ref_representation)

-        self.log.info("Extracted instance '%s' to : '%s'" % (ref_scene_name,
-                                                             reference_path))
+        self.log.debug("Extracted instance '%s' to : '%s'" % (ref_scene_name,
+                                                              reference_path))

@@ -23,7 +23,7 @@ class ExtractLayout(publish.Extractor):
         stagingdir = self.staging_dir(instance)

         # Perform extraction
-        self.log.info("Performing extraction..")
+        self.log.debug("Performing extraction..")

         if "representations" not in instance.data:
             instance.data["representations"] = []

@@ -64,7 +64,7 @@ class ExtractLayout(publish.Extractor):
             fields=["parent", "context.family"]
         )

-        self.log.info(representation)
+        self.log.debug(representation)

         version_id = representation.get("parent")
         family = representation.get("context").get("family")

@@ -159,5 +159,5 @@ class ExtractLayout(publish.Extractor):
         }
         instance.data["representations"].append(json_representation)

-        self.log.info("Extracted instance '%s' to: %s",
-                      instance.name, json_representation)
+        self.log.debug("Extracted instance '%s' to: %s",
+                       instance.name, json_representation)

@@ -307,7 +307,7 @@ class MakeTX(TextureProcessor):

         render_colorspace = color_management["rendering_space"]

-        self.log.info("tx: converting colorspace {0} "
-                      "-> {1}".format(colorspace,
-                                      render_colorspace))
+        self.log.debug("tx: converting colorspace {0} "
+                       "-> {1}".format(colorspace,
+                                       render_colorspace))
         args.extend(["--colorconvert", colorspace, render_colorspace])

@@ -331,7 +331,7 @@ class MakeTX(TextureProcessor):
         if not os.path.exists(resources_dir):
             os.makedirs(resources_dir)

-        self.log.info("Generating .tx file for %s .." % source)
+        self.log.debug("Generating .tx file for %s .." % source)

         subprocess_args = maketx_args + [
             "-v",  # verbose

@@ -421,7 +421,7 @@ class ExtractLook(publish.Extractor):
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using {} as scene type".format(self.scene_type))
                 break
             except KeyError:

@@ -453,7 +453,7 @@ class ExtractLook(publish.Extractor):
         relationships = lookdata["relationships"]
         sets = list(relationships.keys())
         if not sets:
-            self.log.info("No sets found for the look")
+            self.log.debug("No sets found for the look")
             return

         # Specify texture processing executables to activate

@@ -485,7 +485,7 @@ class ExtractLook(publish.Extractor):
         remap = results["attrRemap"]

         # Extract in correct render layer
-        self.log.info("Extracting look maya scene file: {}".format(maya_path))
+        self.log.debug("Extracting look maya scene file: {}".format(maya_path))
         layer = instance.data.get("renderlayer", "defaultRenderLayer")
         with lib.renderlayer(layer):
             # TODO: Ensure membership edits don't become renderlayer overrides

@@ -511,12 +511,12 @@ class ExtractLook(publish.Extractor):
         )

         # Write the JSON data
-        self.log.info("Extract json..")
         data = {
             "attributes": lookdata["attributes"],
             "relationships": relationships
         }

+        self.log.debug("Extracting json file: {}".format(json_path))
         with open(json_path, "w") as f:
             json.dump(data, f)

@@ -557,8 +557,8 @@ class ExtractLook(publish.Extractor):
         # Source hash for the textures
         instance.data["sourceHashes"] = hashes

-        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
-                                                          maya_path))
+        self.log.debug("Extracted instance '%s' to: %s" % (instance.name,
+                                                           maya_path))

     def _set_resource_result_colorspace(self, resource, colorspace):
         """Update resource resulting colorspace after texture processing"""

@@ -589,14 +589,13 @@ class ExtractLook(publish.Extractor):
         resources = instance.data["resources"]
         color_management = lib.get_color_management_preferences()

-        # Temporary fix to NOT create hardlinks on windows machines
-        if platform.system().lower() == "windows":
-            self.log.info(
+        force_copy = instance.data.get("forceCopy", False)
+        if not force_copy and platform.system().lower() == "windows":
+            # Temporary fix to NOT create hardlinks on windows machines
+            self.log.warning(
                 "Forcing copy instead of hardlink due to issues on Windows..."
             )
             force_copy = True
-        else:
-            force_copy = instance.data.get("forceCopy", False)

         destinations_cache = {}

@@ -671,11 +670,11 @@ class ExtractLook(publish.Extractor):
             destination = get_resource_destination_cached(source)
             if force_copy or texture_result.transfer_mode == COPY:
                 transfers.append((source, destination))
-                self.log.info('file will be copied {} -> {}'.format(
+                self.log.debug('file will be copied {} -> {}'.format(
                     source, destination))
             elif texture_result.transfer_mode == HARDLINK:
                 hardlinks.append((source, destination))
-                self.log.info('file will be hardlinked {} -> {}'.format(
+                self.log.debug('file will be hardlinked {} -> {}'.format(
                     source, destination))

             # Store the hashes from hash to destination to include in the

@@ -707,7 +706,7 @@ class ExtractLook(publish.Extractor):
             color_space_attr = "{}.colorSpace".format(node)
             remap[color_space_attr] = resource["result_color_space"]

-        self.log.info("Finished remapping destinations ...")
+        self.log.debug("Finished remapping destinations ...")

         return {
             "fileTransfers": transfers,

@@ -815,8 +814,8 @@ class ExtractLook(publish.Extractor):
             if not processed_result:
                 raise RuntimeError("Texture Processor {} returned "
                                    "no result.".format(processor))
-            self.log.info("Generated processed "
-                          "texture: {}".format(processed_result.path))
+            self.log.debug("Generated processed "
+                           "texture: {}".format(processed_result.path))

             # TODO: Currently all processors force copy instead of allowing
             # hardlinks using source hashes. This should be refactored

@@ -827,7 +826,7 @@ class ExtractLook(publish.Extractor):
         if not force_copy:
             existing = self._get_existing_hashed_texture(filepath)
             if existing:
-                self.log.info("Found hash in database, preparing hardlink..")
+                self.log.debug("Found hash in database, preparing hardlink..")
                 return TextureResult(
                     path=filepath,
                     file_hash=texture_hash,

@@ -34,7 +34,7 @@ class ExtractMayaSceneRaw(publish.Extractor):
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using {} as scene type".format(self.scene_type))
                 break
             except KeyError:

@@ -63,7 +63,7 @@ class ExtractMayaSceneRaw(publish.Extractor):
             selection += self._get_loaded_containers(members)

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")
         with maintained_selection():
             cmds.select(selection, noExpand=True)
             cmds.file(path,

@@ -87,7 +87,8 @@ class ExtractMayaSceneRaw(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
+        self.log.debug("Extracted instance '%s' to: %s" % (instance.name,
+                                                           path))

     @staticmethod
     def _get_loaded_containers(members):

@@ -44,7 +44,7 @@ class ExtractModel(publish.Extractor,
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using {} as scene type".format(self.scene_type))
                 break
             except KeyError:

@@ -56,7 +56,7 @@ class ExtractModel(publish.Extractor,
         path = os.path.join(stagingdir, filename)

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")

         # Get only the shape contents we need in such a way that we avoid
         # taking along intermediateObjects

@@ -102,4 +102,5 @@ class ExtractModel(publish.Extractor,
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
+        self.log.debug("Extracted instance '%s' to: %s" % (instance.name,
+                                                           path))

@@ -101,10 +101,10 @@ class ExtractMultiverseLook(publish.Extractor):

         # Parse export options
         options = self.default_options
-        self.log.info("Export options: {0}".format(options))
+        self.log.debug("Export options: {0}".format(options))

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")

         with maintained_selection():
             members = instance.data("setMembers")

@@ -114,7 +114,7 @@ class ExtractMultiverseLook(publish.Extractor):
                               type="mvUsdCompoundShape",
                               noIntermediate=True,
                               long=True)
-            self.log.info('Collected object {}'.format(members))
+            self.log.debug('Collected object {}'.format(members))
            if len(members) > 1:
                self.log.error('More than one member: {}'.format(members))

@@ -153,5 +153,5 @@ class ExtractMultiverseLook(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance {} to {}".format(
+        self.log.debug("Extracted instance {} to {}".format(
             instance.name, file_path))

@@ -150,7 +150,6 @@ class ExtractMultiverseUsd(publish.Extractor):
         return options

     def get_default_options(self):
-        self.log.info("ExtractMultiverseUsd get_default_options")
         return self.default_options

     def filter_members(self, members):

@@ -173,19 +172,19 @@ class ExtractMultiverseUsd(publish.Extractor):
         # Parse export options
         options = self.get_default_options()
         options = self.parse_overrides(instance, options)
-        self.log.info("Export options: {0}".format(options))
+        self.log.debug("Export options: {0}".format(options))

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")

         with maintained_selection():
             members = instance.data("setMembers")
-            self.log.info('Collected objects: {}'.format(members))
+            self.log.debug('Collected objects: {}'.format(members))
             members = self.filter_members(members)
             if not members:
                 self.log.error('No members!')
                 return
-            self.log.info(' - filtered: {}'.format(members))
+            self.log.debug(' - filtered: {}'.format(members))

             import multiverse

@@ -229,7 +228,7 @@ class ExtractMultiverseUsd(publish.Extractor):
             self.log.debug(" - {}={}".format(key, value))
             setattr(asset_write_opts, key, value)

-        self.log.info('WriteAsset: {} / {}'.format(file_path, members))
+        self.log.debug('WriteAsset: {} / {}'.format(file_path, members))
         multiverse.WriteAsset(file_path, members, asset_write_opts)

         if "representations" not in instance.data:

@@ -243,7 +242,7 @@ class ExtractMultiverseUsd(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance {} to {}".format(
+        self.log.debug("Extracted instance {} to {}".format(
             instance.name, file_path))

@@ -105,14 +105,14 @@ class ExtractMultiverseUsdComposition(publish.Extractor):
         # Parse export options
         options = self.default_options
         options = self.parse_overrides(instance, options)
-        self.log.info("Export options: {0}".format(options))
+        self.log.debug("Export options: {0}".format(options))

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")

         with maintained_selection():
             members = instance.data("setMembers")
-            self.log.info('Collected object {}'.format(members))
+            self.log.debug('Collected object {}'.format(members))

             import multiverse

@@ -175,5 +175,5 @@ class ExtractMultiverseUsdComposition(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance {} to {}".format(
-            instance.name, file_path))
+        self.log.debug("Extracted instance {} to {}".format(instance.name,
+                                                            file_path))

@@ -87,10 +87,10 @@ class ExtractMultiverseUsdOverride(publish.Extractor):

         # Parse export options
         options = self.default_options
-        self.log.info("Export options: {0}".format(options))
+        self.log.debug("Export options: {0}".format(options))

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")

         with maintained_selection():
             members = instance.data("setMembers")

@@ -100,7 +100,7 @@ class ExtractMultiverseUsdOverride(publish.Extractor):
                               type="mvUsdCompoundShape",
                               noIntermediate=True,
                               long=True)
-            self.log.info("Collected object {}".format(members))
+            self.log.debug("Collected object {}".format(members))

             # TODO: Deal with asset, composition, override with options.
             import multiverse

@@ -153,5 +153,5 @@ class ExtractMultiverseUsdOverride(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance {} to {}".format(
+        self.log.debug("Extracted instance {} to {}".format(
             instance.name, file_path))

@@ -30,7 +30,7 @@ class ExtractObj(publish.Extractor):
         # The export requires forward slashes because we need to
         # format it into a string in a mel expression

-        self.log.info("Extracting OBJ to: {0}".format(path))
+        self.log.debug("Extracting OBJ to: {0}".format(path))

         members = instance.data("setMembers")
         members = cmds.ls(members,

@@ -39,8 +39,8 @@ class ExtractObj(publish.Extractor):
                           type=("mesh", "nurbsCurve"),
                           noIntermediate=True,
                           long=True)
-        self.log.info("Members: {0}".format(members))
-        self.log.info("Instance: {0}".format(instance[:]))
+        self.log.debug("Members: {0}".format(members))
+        self.log.debug("Instance: {0}".format(instance[:]))

         if not cmds.pluginInfo('objExport', query=True, loaded=True):
             cmds.loadPlugin('objExport')

@@ -74,4 +74,4 @@ class ExtractObj(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extract OBJ successful to: {0}".format(path))
+        self.log.debug("Extract OBJ successful to: {0}".format(path))

@@ -48,7 +48,7 @@ class ExtractPlayblast(publish.Extractor):
         self.log.debug("playblast path {}".format(path))

     def process(self, instance):
-        self.log.info("Extracting capture..")
+        self.log.debug("Extracting capture..")

         # get scene fps
         fps = instance.data.get("fps") or instance.context.data.get("fps")

@@ -62,7 +62,7 @@ class ExtractPlayblast(publish.Extractor):
         if end is None:
             end = cmds.playbackOptions(query=True, animationEndTime=True)

-        self.log.info("start: {}, end: {}".format(start, end))
+        self.log.debug("start: {}, end: {}".format(start, end))

         # get cameras
         camera = instance.data["review_camera"]

@@ -119,7 +119,7 @@ class ExtractPlayblast(publish.Extractor):
         filename = "{0}".format(instance.name)
         path = os.path.join(stagingdir, filename)

-        self.log.info("Outputting images to %s" % path)
+        self.log.debug("Outputting images to %s" % path)

         preset["filename"] = path
         preset["overwrite"] = True

@@ -237,7 +237,7 @@ class ExtractPlayblast(publish.Extractor):
             self.log.debug("collection head {}".format(filebase))
             if filebase in filename:
                 frame_collection = collection
-                self.log.info(
+                self.log.debug(
                     "we found collection of interest {}".format(
                         str(frame_collection)))

@@ -109,11 +109,11 @@ class ExtractAlembic(publish.Extractor):

         instance.context.data["cleanupFullPaths"].append(path)

-        self.log.info("Extracted {} to {}".format(instance, dirname))
+        self.log.debug("Extracted {} to {}".format(instance, dirname))

         # Extract proxy.
         if not instance.data.get("proxy"):
-            self.log.info("No proxy nodes found. Skipping proxy extraction.")
+            self.log.debug("No proxy nodes found. Skipping proxy extraction.")
             return

         path = path.replace(".abc", "_proxy.abc")

@@ -32,7 +32,7 @@ class ExtractProxyAlembic(publish.Extractor):
         attr_prefixes = instance.data.get("attrPrefix", "").split(";")
         attr_prefixes = [value for value in attr_prefixes if value.strip()]

-        self.log.info("Extracting Proxy Alembic..")
+        self.log.debug("Extracting Proxy Alembic..")
         dirname = self.staging_dir(instance)

         filename = "{name}.abc".format(**instance.data)

@@ -82,7 +82,7 @@ class ExtractProxyAlembic(publish.Extractor):

         instance.context.data["cleanupFullPaths"].append(path)

-        self.log.info("Extracted {} to {}".format(instance, dirname))
+        self.log.debug("Extracted {} to {}".format(instance, dirname))
         # remove the bounding box
         bbox_master = cmds.ls("bbox_grp")
         cmds.delete(bbox_master)

@@ -59,7 +59,7 @@ class ExtractRedshiftProxy(publish.Extractor):
         # vertex_colors = instance.data.get("vertexColors", False)

         # Write out rs file
-        self.log.info("Writing: '%s'" % file_path)
+        self.log.debug("Writing: '%s'" % file_path)
         with maintained_selection():
             cmds.select(instance.data["setMembers"], noExpand=True)
             cmds.file(file_path,

@@ -82,5 +82,5 @@ class ExtractRedshiftProxy(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '%s' to: %s"
-                      % (instance.name, staging_dir))
+        self.log.debug("Extracted instance '%s' to: %s"
+                       % (instance.name, staging_dir))

@@ -37,5 +37,5 @@ class ExtractRenderSetup(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info(
+        self.log.debug(
             "Extracted instance '%s' to: %s" % (instance.name, json_path))

@@ -27,7 +27,7 @@ class ExtractRig(publish.Extractor):
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using '.{}' as scene type".format(self.scene_type))
                 break
             except AttributeError:

@@ -39,7 +39,7 @@ class ExtractRig(publish.Extractor):
         path = os.path.join(dir_path, filename)

         # Perform extraction
-        self.log.info("Performing extraction ...")
+        self.log.debug("Performing extraction ...")
         with maintained_selection():
             cmds.select(instance, noExpand=True)
             cmds.file(path,

@@ -63,4 +63,4 @@ class ExtractRig(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))
+        self.log.debug("Extracted instance '%s' to: %s", instance.name, path)

@@ -24,7 +24,7 @@ class ExtractThumbnail(publish.Extractor):
     families = ["review"]

     def process(self, instance):
-        self.log.info("Extracting capture..")
+        self.log.debug("Extracting capture..")

         camera = instance.data["review_camera"]

@@ -96,7 +96,7 @@ class ExtractThumbnail(publish.Extractor):
         filename = "{0}".format(instance.name)
         path = os.path.join(dst_staging, filename)

-        self.log.info("Outputting images to %s" % path)
+        self.log.debug("Outputting images to %s" % path)

         preset["filename"] = path
         preset["overwrite"] = True

@@ -159,7 +159,7 @@ class ExtractThumbnail(publish.Extractor):

         _, thumbnail = os.path.split(playblast)

-        self.log.info("file list {}".format(thumbnail))
+        self.log.debug("file list {}".format(thumbnail))

         if "representations" not in instance.data:
             instance.data["representations"] = []

@@ -57,9 +57,9 @@ class ExtractUnrealSkeletalMeshAbc(publish.Extractor):
         # to format it into a string in a mel expression
         path = path.replace('\\', '/')

-        self.log.info("Extracting ABC to: {0}".format(path))
-        self.log.info("Members: {0}".format(nodes))
-        self.log.info("Instance: {0}".format(instance[:]))
+        self.log.debug("Extracting ABC to: {0}".format(path))
+        self.log.debug("Members: {0}".format(nodes))
+        self.log.debug("Instance: {0}".format(instance[:]))

         options = {
             "step": instance.data.get("step", 1.0),

@@ -74,7 +74,7 @@ class ExtractUnrealSkeletalMeshAbc(publish.Extractor):
             "worldSpace": instance.data.get("worldSpace", True)
         }

-        self.log.info("Options: {}".format(options))
+        self.log.debug("Options: {}".format(options))

         if int(cmds.about(version=True)) >= 2017:
             # Since Maya 2017 alembic supports multiple uv sets - write them.

@@ -105,4 +105,4 @@ class ExtractUnrealSkeletalMeshAbc(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extract ABC successful to: {0}".format(path))
+        self.log.debug("Extract ABC successful to: {0}".format(path))

@@ -46,9 +46,9 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
         # to format it into a string in a mel expression
         path = path.replace('\\', '/')

-        self.log.info("Extracting FBX to: {0}".format(path))
-        self.log.info("Members: {0}".format(to_extract))
-        self.log.info("Instance: {0}".format(instance[:]))
+        self.log.debug("Extracting FBX to: {0}".format(path))
+        self.log.debug("Members: {0}".format(to_extract))
+        self.log.debug("Instance: {0}".format(instance[:]))

         fbx_exporter.set_options_from_instance(instance)

@@ -70,7 +70,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
             renamed_to_extract.append("|".join(node_path))

         with renamed(original_parent, parent_node):
-            self.log.info("Extracting: {}".format(renamed_to_extract, path))
+            self.log.debug("Extracting: {}".format(renamed_to_extract, path))
             fbx_exporter.export(renamed_to_extract, path)

         if "representations" not in instance.data:

@@ -84,4 +84,4 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extract FBX successful to: {0}".format(path))
+        self.log.debug("Extract FBX successful to: {0}".format(path))

@@ -37,15 +37,15 @@ class ExtractUnrealStaticMesh(publish.Extractor):
         # to format it into a string in a mel expression
         path = path.replace('\\', '/')

-        self.log.info("Extracting FBX to: {0}".format(path))
-        self.log.info("Members: {0}".format(members))
-        self.log.info("Instance: {0}".format(instance[:]))
+        self.log.debug("Extracting FBX to: {0}".format(path))
+        self.log.debug("Members: {0}".format(members))
+        self.log.debug("Instance: {0}".format(instance[:]))

         fbx_exporter.set_options_from_instance(instance)

         with maintained_selection():
             with parent_nodes(members):
-                self.log.info("Un-parenting: {}".format(members))
+                self.log.debug("Un-parenting: {}".format(members))
                 fbx_exporter.export(members, path)

         if "representations" not in instance.data:

@@ -59,4 +59,4 @@ class ExtractUnrealStaticMesh(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extract FBX successful to: {0}".format(path))
+        self.log.debug("Extract FBX successful to: {0}".format(path))

@@ -43,7 +43,7 @@ class ExtractVRayProxy(publish.Extractor):
         vertex_colors = instance.data.get("vertexColors", False)

         # Write out vrmesh file
-        self.log.info("Writing: '%s'" % file_path)
+        self.log.debug("Writing: '%s'" % file_path)
         with maintained_selection():
             cmds.select(instance.data["setMembers"], noExpand=True)
             cmds.vrayCreateProxy(exportType=1,

@@ -68,5 +68,5 @@ class ExtractVRayProxy(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '%s' to: %s"
-                      % (instance.name, staging_dir))
+        self.log.debug("Extracted instance '%s' to: %s"
+                       % (instance.name, staging_dir))

@@ -20,13 +20,13 @@ class ExtractVrayscene(publish.Extractor):
     def process(self, instance):
         """Plugin entry point."""
         if instance.data.get("exportOnFarm"):
-            self.log.info("vrayscenes will be exported on farm.")
+            self.log.debug("vrayscenes will be exported on farm.")
             raise NotImplementedError(
                 "exporting vrayscenes is not implemented")

         # handle sequence
         if instance.data.get("vraySceneMultipleFiles"):
-            self.log.info("vrayscenes will be exported on farm.")
+            self.log.debug("vrayscenes will be exported on farm.")
             raise NotImplementedError(
                 "exporting vrayscene sequences not implemented yet")

@@ -40,7 +40,6 @@ class ExtractVrayscene(publish.Extractor):
         layer_name = instance.data.get("layer")

         staging_dir = self.staging_dir(instance)
-        self.log.info("staging: {}".format(staging_dir))
         template = cmds.getAttr("{}.vrscene_filename".format(node))
         start_frame = instance.data.get(
             "frameStartHandle") if instance.data.get(

@@ -56,21 +55,21 @@ class ExtractVrayscene(publish.Extractor):
             staging_dir, "vrayscene", *formatted_name.split("/"))

         # Write out vrscene file
-        self.log.info("Writing: '%s'" % file_path)
+        self.log.debug("Writing: '%s'" % file_path)
         with maintained_selection():
             if "*" not in instance.data["setMembers"]:
-                self.log.info(
+                self.log.debug(
                     "Exporting: {}".format(instance.data["setMembers"]))
                 set_members = instance.data["setMembers"]
                 cmds.select(set_members, noExpand=True)
             else:
-                self.log.info("Exporting all ...")
+                self.log.debug("Exporting all ...")
                 set_members = cmds.ls(
                     long=True, objectsOnly=True,
                     geometry=True, lights=True, cameras=True)
                 cmds.select(set_members, noExpand=True)

-            self.log.info("Appending layer name {}".format(layer_name))
+            self.log.debug("Appending layer name {}".format(layer_name))
             set_members.append(layer_name)

             export_in_rs_layer(

@@ -93,8 +92,8 @@ class ExtractVrayscene(publish.Extractor):
         }
         instance.data["representations"].append(representation)

-        self.log.info("Extracted instance '%s' to: %s"
-                      % (instance.name, staging_dir))
+        self.log.debug("Extracted instance '%s' to: %s"
+                       % (instance.name, staging_dir))

     @staticmethod
     def format_vray_output_filename(

@@ -241,7 +241,7 @@ class ExtractWorkfileXgen(publish.Extractor):
             data[palette] = {attr: old_value}

         cmds.setAttr(node_attr, value, type="string")
-        self.log.info(
+        self.log.debug(
             "Setting \"{}\" on \"{}\"".format(value, node_attr)
         )

@@ -77,7 +77,7 @@ class ExtractXgen(publish.Extractor):
         xgenm.exportPalette(
             instance.data["xgmPalette"].replace("|", ""), temp_xgen_path
         )
-        self.log.info("Extracted to {}".format(temp_xgen_path))
+        self.log.debug("Extracted to {}".format(temp_xgen_path))

         # Import xgen onto the duplicate.
         with maintained_selection():

@@ -118,7 +118,7 @@ class ExtractXgen(publish.Extractor):
             expressions=True
         )

-        self.log.info("Extracted to {}".format(maya_filepath))
+        self.log.debug("Extracted to {}".format(maya_filepath))

         if os.path.exists(temp_xgen_path):
             os.remove(temp_xgen_path)

@@ -39,7 +39,7 @@ class ExtractYetiCache(publish.Extractor):
         else:
             kwargs.update({"samples": samples})

-        self.log.info(
+        self.log.debug(
             "Writing out cache {} - {}".format(start_frame, end_frame))
         # Start writing the files for snap shot
         # <NAME> will be replace by the Yeti node name

@@ -53,7 +53,7 @@ class ExtractYetiCache(publish.Extractor):

         cache_files = [x for x in os.listdir(dirname) if x.endswith(".fur")]

-        self.log.info("Writing metadata file")
+        self.log.debug("Writing metadata file")
         settings = instance.data["fursettings"]
         fursettings_path = os.path.join(dirname, "yeti.fursettings")
         with open(fursettings_path, "w") as fp:

@@ -63,7 +63,7 @@ class ExtractYetiCache(publish.Extractor):
         if "representations" not in instance.data:
             instance.data["representations"] = []

-        self.log.info("cache files: {}".format(cache_files[0]))
+        self.log.debug("cache files: {}".format(cache_files[0]))

         # Workaround: We do not explicitly register these files with the
         # representation solely so that we can write multiple sequences

@@ -87,4 +87,4 @@ class ExtractYetiCache(publish.Extractor):
             }
         )

-        self.log.info("Extracted {} to {}".format(instance, dirname))
+        self.log.debug("Extracted {} to {}".format(instance, dirname))

@@ -109,7 +109,7 @@ class ExtractYetiRig(publish.Extractor):
         for family in self.families:
             try:
                 self.scene_type = ext_mapping[family]
-                self.log.info(
+                self.log.debug(
                     "Using {} as scene type".format(self.scene_type))
                 break
             except KeyError:

@@ -127,7 +127,7 @@ class ExtractYetiRig(publish.Extractor):
         maya_path = os.path.join(dirname,
                                  "yeti_rig.{}".format(self.scene_type))

-        self.log.info("Writing metadata file")
+        self.log.debug("Writing metadata file: {}".format(settings_path))

         image_search_path = resources_dir = instance.data["resourcesDir"]

@@ -147,7 +147,7 @@ class ExtractYetiRig(publish.Extractor):
             dst = os.path.join(image_search_path, os.path.basename(file))
             instance.data['transfers'].append([src, dst])

-            self.log.info("adding transfer {} -> {}". format(src, dst))
+            self.log.debug("adding transfer {} -> {}". format(src, dst))

         # Ensure the imageSearchPath is being remapped to the publish folder
         attr_value = {"%s.imageSearchPath" % n: str(image_search_path) for

@@ -182,7 +182,7 @@ class ExtractYetiRig(publish.Extractor):
         if "representations" not in instance.data:
             instance.data["representations"] = []

-        self.log.info("rig file: {}".format(maya_path))
+        self.log.debug("rig file: {}".format(maya_path))
         instance.data["representations"].append(
             {
                 'name': self.scene_type,

@@ -191,7 +191,7 @@ class ExtractYetiRig(publish.Extractor):
                 'stagingDir': dirname
             }
         )
-        self.log.info("settings file: {}".format(settings_path))
+        self.log.debug("settings file: {}".format(settings_path))
         instance.data["representations"].append(
             {
                 'name': 'rigsettings',

@@ -201,6 +201,6 @@ class ExtractYetiRig(publish.Extractor):
             }
         )

-        self.log.info("Extracted {} to {}".format(instance, dirname))
+        self.log.debug("Extracted {} to {}".format(instance, dirname))

         cmds.select(clear=True)

@@ -23,7 +23,7 @@ class ResetXgenAttributes(pyblish.api.InstancePlugin):
         for palette, data in xgen_attributes.items():
             for attr, value in data.items():
                 node_attr = "{}.{}".format(palette, attr)
-                self.log.info(
+                self.log.debug(
                     "Setting \"{}\" on \"{}\"".format(value, node_attr)
                 )
                 cmds.setAttr(node_attr, value, type="string")

@@ -32,5 +32,5 @@ class ResetXgenAttributes(pyblish.api.InstancePlugin):
        # Need to save the scene, cause the attribute changes above does not
        # mark the scene as modified so user can exit without committing the
        # changes.
-        self.log.info("Saving changes.")
+        self.log.debug("Saving changes.")
        cmds.file(save=True)

@@ -215,9 +215,9 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
         :rtype: int
         :raises: Exception if template ID isn't found
         """
-        self.log.info("Trying to find template for [{}]".format(renderer))
+        self.log.debug("Trying to find template for [{}]".format(renderer))
         mapped = _get_template_id(renderer)
-        self.log.info("got id [{}]".format(mapped))
+        self.log.debug("got id [{}]".format(mapped))
         return self._templates.get(mapped)

     def _submit(self, payload):

@@ -454,8 +454,8 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):

         self.preflight_check(instance)

-        self.log.info("Submitting ...")
-        self.log.info(json.dumps(payload, indent=4, sort_keys=True))
+        self.log.debug("Submitting ...")
+        self.log.debug(json.dumps(payload, indent=4, sort_keys=True))

         response = self._submit(payload)
         # response = requests.post(url, json=payload)

@@ -20,7 +20,7 @@ class ValidateAssemblyName(pyblish.api.InstancePlugin):

     @classmethod
     def get_invalid(cls, instance):
-        cls.log.info("Checking name of {}".format(instance.name))
+        cls.log.debug("Checking name of {}".format(instance.name))

         content_instance = instance.data.get("setMembers", None)
         if not content_instance:

@@ -23,7 +23,7 @@ class ValidateAssemblyNamespaces(pyblish.api.InstancePlugin):

     def process(self, instance):

-        self.log.info("Checking namespace for %s" % instance.name)
+        self.log.debug("Checking namespace for %s" % instance.name)
         if self.get_invalid(instance):
             raise PublishValidationError("Nested namespaces found")

@@ -47,10 +47,10 @@ class ValidateFrameRange(pyblish.api.InstancePlugin,

         context = instance.context
         if instance.data.get("tileRendering"):
-            self.log.info((
+            self.log.debug(
                 "Skipping frame range validation because "
                 "tile rendering is enabled."
-            ))
+            )
             return

         frame_start_handle = int(context.data.get("frameStartHandle"))

@@ -75,7 +75,7 @@ class ValidateGLSLMaterial(pyblish.api.InstancePlugin):
         """

         meshes = cmds.ls(instance, type="mesh", long=True)
-        cls.log.info("meshes: {}".format(meshes))
+        cls.log.debug("meshes: {}".format(meshes))
         # load the glsl shader plugin
         cmds.loadPlugin("glslShader", quiet=True)

@@ -96,8 +96,8 @@ class ValidateGLSLMaterial(pyblish.api.InstancePlugin):
             cls.log.warning("ogsfx shader file "
                             "not found in {}".format(ogsfx_path))

-            cls.log.info("Find the ogsfx shader file in "
-                         "default maya directory...")
+            cls.log.debug("Searching the ogsfx shader file in "
+                          "default maya directory...")
             # re-direct to search the ogsfx path in maya_dir
             ogsfx_path = os.getenv("MAYA_APP_DIR") + ogsfx_path
             if not os.path.exists(ogsfx_path):

@@ -130,8 +130,8 @@ class ValidateGLSLMaterial(pyblish.api.InstancePlugin):
     @classmethod
     def pbs_shader_conversion(cls, main_shader, glsl):

-        cls.log.info("StringrayPBS detected "
-                     "-> Can do texture conversion")
+        cls.log.debug("StringrayPBS detected "
+                      "-> Can do texture conversion")

         for shader in main_shader:
             # get the file textures related to the PBS Shader

@@ -168,8 +168,8 @@ class ValidateGLSLMaterial(pyblish.api.InstancePlugin):

     @classmethod
     def arnold_shader_conversion(cls, main_shader, glsl):
-        cls.log.info("aiStandardSurface detected "
-                     "-> Can do texture conversion")
+        cls.log.debug("aiStandardSurface detected "
+                      "-> Can do texture conversion")

         for shader in main_shader:
             # get the file textures related to the PBS Shader

@@ -21,7 +21,7 @@ class ValidateInstancerContent(pyblish.api.InstancePlugin):
         members = instance.data['setMembers']
         export_members = instance.data['exactExportMembers']

-        self.log.info("Contents {0}".format(members))
+        self.log.debug("Contents {0}".format(members))

         if not len(members) == len(cmds.ls(members, type="instancer")):
             self.log.error("Instancer can only contain instancers")

@@ -5,8 +5,6 @@ import pyblish.api

 from openpype.pipeline.publish import PublishValidationError

-VERBOSE = False
-

 def is_cache_resource(resource):
     """Return whether resource is a cacheFile resource"""

@@ -73,9 +71,6 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
         xml = all_files.pop(0)
         assert xml.endswith(".xml")

-        if VERBOSE:
-            cls.log.info("Checking: {0}".format(all_files))
-
         # Ensure all files exist (including ticks)
         # The remainder file paths should be the .mcx or .mcc files
         valdidate_files(all_files)

@@ -129,8 +124,8 @@ class ValidateInstancerFrameRanges(pyblish.api.InstancePlugin):
         # for the frames required by the time range.
         if ticks:
             ticks = list(sorted(ticks))
-            cls.log.info("Found ticks: {0} "
-                         "(substeps: {1})".format(ticks, len(ticks)))
+            cls.log.debug("Found ticks: {0} "
+                          "(substeps: {1})".format(ticks, len(ticks)))

             # Check all frames except the last since we don't
             # require subframes after our time range.

@@ -125,7 +125,7 @@ class ValidateModelName(pyblish.api.InstancePlugin,
         r = re.compile(regex)

         for obj in filtered:
-            cls.log.info("testing: {}".format(obj))
+            cls.log.debug("testing: {}".format(obj))
             m = r.match(obj)
             if m is None:
                 cls.log.error("invalid name on: {}".format(obj))

@@ -35,12 +35,12 @@ class ValidateMvLookContents(pyblish.api.InstancePlugin,
         publishMipMap = instance.data["publishMipMap"]
         enforced = True
         if intent in self.enforced_intents:
-            self.log.info("This validation will be enforced: '{}'"
-                          .format(intent))
+            self.log.debug("This validation will be enforced: '{}'"
+                           .format(intent))
         else:
             enforced = False
-            self.log.info("This validation will NOT be enforced: '{}'"
-                          .format(intent))
+            self.log.debug("This validation will NOT be enforced: '{}'"
+                           .format(intent))

         if not instance[:]:
             raise PublishValidationError("Instance is empty")

@@ -75,8 +75,9 @@ class ValidateMvLookContents(pyblish.api.InstancePlugin,
             self.log.warning(msg)

         if invalid:
-            raise PublishValidationError("'{}' has invalid look "
-                                         "content".format(instance.name))
+            raise PublishValidationError(
+                "'{}' has invalid look content".format(instance.name)
+            )

     def valid_file(self, fname):
         self.log.debug("Checking validity of '{}'".format(fname))

@@ -28,7 +28,7 @@ class ValidateSkeletalMeshHierarchy(pyblish.api.InstancePlugin):
             parent.split("|")[1] for parent in (joints_parents + geo_parents)
         }

-        self.log.info(parents_set)
+        self.log.debug(parents_set)

         if len(set(parents_set)) > 2:
             raise PublishXmlValidationError(

@@ -140,12 +140,12 @@ class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin,
             return

         if not self.validate_mesh and not self.validate_collision:
-            self.log.info("Validation of both mesh and collision names"
-                          "is disabled.")
+            self.log.debug("Validation of both mesh and collision names"
+                           "is disabled.")
             return

         if not instance.data.get("collisionMembers", None):
-            self.log.info("There are no collision objects to validate")
+            self.log.debug("There are no collision objects to validate")
             return

         invalid = self.get_invalid(instance)

@@ -52,6 +52,6 @@ class ValidateVRayDistributedRendering(pyblish.api.InstancePlugin):

         renderlayer = instance.data.get("renderlayer")
         with lib.renderlayer(renderlayer):
-            cls.log.info("Enabling Distributed Rendering "
-                         "ignore in batch mode..")
+            cls.log.debug("Enabling Distributed Rendering "
+                          "ignore in batch mode..")
             cmds.setAttr(cls.ignored_attr, True)

@@ -54,7 +54,7 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin):
             # has any yeti callback set or not since if the callback
             # is there it wouldn't error and if it weren't then
             # nothing happens because there are no yeti nodes.
-            cls.log.info(
+            cls.log.debug(
                 "Yeti is loaded but no yeti nodes were found. "
                 "Callback validation skipped.."
            )

@@ -62,7 +62,7 @@ class ValidateYetiRenderScriptCallbacks(pyblish.api.InstancePlugin):

         renderer = instance.data["renderer"]
         if renderer == "redshift":
-            cls.log.info("Redshift ignores any pre and post render callbacks")
+            cls.log.debug("Redshift ignores any pre and post render callbacks")
             return False

         callback_lookup = cls.callbacks.get(renderer, {})

@@ -37,8 +37,8 @@ class ValidateYetiRigInputShapesInInstance(pyblish.api.Validator):

         # Allow publish without input meshes.
         if not shapes:
-            cls.log.info("Found no input meshes for %s, skipping ..."
-                         % instance)
+            cls.log.debug("Found no input meshes for %s, skipping ..."
+                          % instance)
             return []

         # check if input node is part of groomRig instance