mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00

Global: Tweak logging for artist-facing reports in publisher

This commit is contained in:
parent eec1d82db2
commit 63923ff4d8

21 changed files with 81 additions and 77 deletions
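The commit demotes developer-oriented self.log.info calls to self.log.debug across publish plugins so the artist-facing publisher report only surfaces messages artists should act on. A minimal sketch of that convention follows; the CollectExampleData plugin below is hypothetical and not part of this commit, only pyblish.api and the plugin's self.log attribute come from the real API.

import pyblish.api


class CollectExampleData(pyblish.api.InstancePlugin):
    """Hypothetical collector showing the logging convention.

    Internal progress chatter goes to ``debug`` so it stays out of the
    artist-facing report; outcomes an artist should notice stay at
    ``info`` or ``warning``.
    """

    order = pyblish.api.CollectorOrder
    label = "Collect Example Data"

    def process(self, instance):
        # Developer detail: useful when debugging, noise for artists.
        self.log.debug("Processing instance: `{}` ...".format(instance))

        if not instance.data.get("representations"):
            # Artist-facing outcome: keep it visible in the report.
            self.log.info("Instance has no representations. Skipping")
            return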
@@ -69,7 +69,7 @@ class CleanUp(pyblish.api.InstancePlugin):
             skip_cleanup_filepaths.add(os.path.normpath(path))

         if self.remove_temp_renders:
-            self.log.info("Cleaning renders new...")
+            self.log.debug("Cleaning renders new...")
             self.clean_renders(instance, skip_cleanup_filepaths)

         if [ef for ef in self.exclude_families

@@ -95,7 +95,9 @@ class CleanUp(pyblish.api.InstancePlugin):
             return

         if instance.data.get("stagingDir_persistent"):
-            self.log.info("Staging dir: %s should be persistent" % staging_dir)
+            self.log.debug(
+                "Staging dir {} should be persistent".format(staging_dir)
+            )
             return

         self.log.info("Removing staging directory {}".format(staging_dir))

@@ -26,10 +26,10 @@ class CleanUpFarm(pyblish.api.ContextPlugin):
         # Skip process if is not in list of source hosts in which this
         # plugin should run
         if src_host_name not in self.allowed_hosts:
-            self.log.info((
+            self.log.debug(
                 "Source host \"{}\" is not in list of enabled hosts {}."
-                " Skipping"
-            ).format(str(src_host_name), str(self.allowed_hosts)))
+                " Skipping".format(src_host_name, self.allowed_hosts)
+            )
             return

         self.log.debug("Preparing filepaths to remove")

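Several of these rewrites, like the CleanUpFarm hunk above, attach .format(...) to the last of several adjacent string literals. Python concatenates adjacent literals into a single string before the method call resolves, so the formatting applies to the whole joined message, not only to the final fragment. A small standalone check (the host values are made up for illustration):

# Adjacent string literals form one string before ``.format`` runs,
# so both placeholders are filled even though the call sits on the
# last fragment only.
message = (
    "Source host \"{}\" is not in list of enabled hosts {}."
    " Skipping".format("maya", ["nuke", "houdini"])
)
print(message)
# -> Source host "maya" is not in list of enabled hosts ['nuke', 'houdini']. Skipping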
@@ -47,7 +47,7 @@ class CleanUpFarm(pyblish.api.ContextPlugin):
             dirpaths_to_remove.add(os.path.normpath(staging_dir))

         if not dirpaths_to_remove:
-            self.log.info("Nothing to remove. Skipping")
+            self.log.debug("Nothing to remove. Skipping")
             return

         self.log.debug("Filepaths to remove are:\n{}".format(

@@ -53,8 +53,8 @@ class CollectAudio(pyblish.api.ContextPlugin):
         ):
             # Skip instances that already have audio filled
             if instance.data.get("audio"):
-                self.log.info(
-                    "Skipping Audio collecion. It is already collected"
+                self.log.debug(
+                    "Skipping Audio collection. It is already collected"
                 )
                 continue
             filtered_instances.append(instance)

@@ -70,7 +70,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
             instances_by_asset_name[asset_name].append(instance)

         asset_names = set(instances_by_asset_name.keys())
-        self.log.info((
+        self.log.debug((
             "Searching for audio subset '{subset}' in assets {assets}"
         ).format(
             subset=self.audio_subset_name,

@@ -100,7 +100,7 @@ class CollectAudio(pyblish.api.ContextPlugin):
                 "offset": 0,
                 "filename": repre_path
             }]
-            self.log.info("Audio Data added to instance ...")
+            self.log.debug("Audio Data added to instance ...")

     def query_representations(self, project_name, asset_names):
         """Query representations related to audio subsets for passed assets.

@@ -24,7 +24,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
         final_context[project_name]['entity_type'] = 'Project'

         for instance in context:
-            self.log.info("Processing instance: `{}` ...".format(instance))
+            self.log.debug("Processing instance: `{}` ...".format(instance))

             # shot data dict
             shot_data = {}

@@ -91,12 +91,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
         # now we can just add instances from json file and we are done
         for instance_data in data.get("instances"):

-            self.log.info(" - processing instance for {}".format(
+            self.log.debug(" - processing instance for {}".format(
                 instance_data.get("subset")))
             instance = self._context.create_instance(
                 instance_data.get("subset")
             )
-            self.log.info("Filling stagingDir...")
+            self.log.debug("Filling stagingDir...")

             self._fill_staging_dir(instance_data, anatomy)
             instance.data.update(instance_data)

@@ -121,7 +121,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
                     "offset": 0
                 }]
             })
-            self.log.info(
+            self.log.debug(
                 f"Adding audio to instance: {instance.data['audio']}")

     def process(self, context):

@@ -137,11 +137,11 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):

         # Using already collected Anatomy
         anatomy = context.data["anatomy"]
-        self.log.info("Getting root setting for project \"{}\"".format(
+        self.log.debug("Getting root setting for project \"{}\"".format(
             anatomy.project_name
         ))

-        self.log.info("anatomy: {}".format(anatomy.roots))
+        self.log.debug("anatomy: {}".format(anatomy.roots))
         try:
             session_is_set = False
             for path in paths:

@@ -156,7 +156,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
                 if remapped:
                     session_data["AVALON_WORKDIR"] = remapped

-                self.log.info("Setting session using data from file")
+                self.log.debug("Setting session using data from file")
                 legacy_io.Session.update(session_data)
                 os.environ.update(session_data)
                 session_is_set = True

@@ -83,7 +83,7 @@ class ExtractBurnin(publish.Extractor):
             return

         if not instance.data.get("representations"):
-            self.log.info(
+            self.log.debug(
                 "Instance does not have filled representations. Skipping")
             return

@@ -135,11 +135,11 @@ class ExtractBurnin(publish.Extractor):
                 burnin_defs, repre["tags"]
             )
             if not repre_burnin_defs:
-                self.log.info((
+                self.log.debug(
                     "Skipped representation. All burnin definitions from"
-                    " selected profile does not match to representation's"
-                    " tags. \"{}\""
-                ).format(str(repre["tags"])))
+                    " selected profile do not match to representation's"
+                    " tags. \"{}\"".format(repre["tags"])
+                )
                 continue
             filtered_repres.append((repre, repre_burnin_defs))

@@ -164,7 +164,7 @@ class ExtractBurnin(publish.Extractor):
                 logger=self.log)

         if not profile:
-            self.log.info((
+            self.log.debug((
                 "Skipped instance. None of profiles in presets are for"
                 " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
                 " | Task type \"{}\" | Subset \"{}\" "

@@ -176,7 +176,7 @@ class ExtractBurnin(publish.Extractor):
         # Pre-filter burnin definitions by instance families
         burnin_defs = self.filter_burnins_defs(profile, instance)
         if not burnin_defs:
-            self.log.info((
+            self.log.debug((
                 "Skipped instance. Burnin definitions are not set for profile"
                 " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
                 " | Profile \"{}\""

@@ -223,10 +223,10 @@ class ExtractBurnin(publish.Extractor):
                 # If result is None the requirement of conversion can't be
                 # determined
                 if do_convert is None:
-                    self.log.info((
+                    self.log.debug(
                         "Can't determine if representation requires conversion."
                         " Skipped."
-                    ))
+                    )
                     continue

                 # Do conversion if needed

@@ -320,7 +320,7 @@ class ExtractOIIOTranscode(publish.Extractor):
                 logger=self.log)

         if not profile:
-            self.log.info((
+            self.log.debug((
                 "Skipped instance. None of profiles in presets are for"
                 " Host: \"{}\" | Families: \"{}\" | Task \"{}\""
                 " | Task type \"{}\" | Subset \"{}\" "

@@ -30,7 +30,7 @@ class ExtractColorspaceData(publish.Extractor,
     def process(self, instance):
         representations = instance.data.get("representations")
         if not representations:
-            self.log.info("No representations at instance : `{}`".format(
+            self.log.debug("No representations at instance : `{}`".format(
                 instance))
             return

@@ -21,7 +21,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
             return

         if "hierarchyContext" not in context.data:
-            self.log.info("skipping IntegrateHierarchyToAvalon")
+            self.log.debug("skipping ExtractHierarchyToAvalon")
             return

         if not legacy_io.Session:

@@ -27,13 +27,13 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):

         hierarchy_context = context.data.get("hierarchyContext")
         if not hierarchy_context:
-            self.log.info("Skipping")
+            self.log.debug("Skipping ExtractHierarchyToAYON")
             return

         project_name = context.data["projectName"]
         hierarchy_context = self._filter_hierarchy(context)
         if not hierarchy_context:
-            self.log.info("All folders were filtered out")
+            self.log.debug("All folders were filtered out")
             return

         self.log.debug("Hierarchy_context: {}".format(

@@ -46,7 +46,7 @@ class ExtractReviewSlate(publish.Extractor):
             "*": inst_data["slateFrame"]
         }

-        self.log.info("_ slates_data: {}".format(pformat(slates_data)))
+        self.log.debug("_ slates_data: {}".format(pformat(slates_data)))

         if "reviewToWidth" in inst_data:
             use_legacy_code = True

@@ -76,7 +76,7 @@ class ExtractReviewSlate(publish.Extractor):
             )
             # get slate data
             slate_path = self._get_slate_path(input_file, slates_data)
-            self.log.info("_ slate_path: {}".format(slate_path))
+            self.log.debug("_ slate_path: {}".format(slate_path))

             slate_width, slate_height = self._get_slates_resolution(slate_path)

@@ -29,24 +29,24 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin):
         representations_new = []

         for repre in representations:
-            self.log.info(
+            self.log.debug(
                 "Processing representation {}".format(repre.get("name")))
             tags = repre.get("tags", [])
             if "toScanline" not in tags:
-                self.log.info(" - missing toScanline tag")
+                self.log.debug(" - missing toScanline tag")
                 continue

             # run only on exrs
             if repre.get("ext") != "exr":
-                self.log.info("- not EXR files")
+                self.log.debug("- not EXR files")
                 continue

             if not isinstance(repre['files'], (list, tuple)):
                 input_files = [repre['files']]
-                self.log.info("We have a single frame")
+                self.log.debug("We have a single frame")
             else:
                 input_files = repre['files']
-                self.log.info("We have a sequence")
+                self.log.debug("We have a sequence")

             stagingdir = os.path.normpath(repre.get("stagingDir"))

@@ -68,7 +68,7 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin):
             ]

             subprocess_exr = " ".join(oiio_cmd)
-            self.log.info(f"running: {subprocess_exr}")
+            self.log.debug(f"running: {subprocess_exr}")
             run_subprocess(subprocess_exr, logger=self.log)

             # raise error if there is no ouptput

@@ -43,12 +43,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):

         # Skip if instance have 'review' key in data set to 'False'
         if not self._is_review_instance(instance):
-            self.log.info("Skipping - no review set on instance.")
+            self.log.debug("Skipping - no review set on instance.")
             return

         # Check if already has thumbnail created
         if self._already_has_thumbnail(instance_repres):
-            self.log.info("Thumbnail representation already present.")
+            self.log.debug("Thumbnail representation already present.")
             return

         # skip crypto passes.

@@ -58,15 +58,15 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         # representation that can be determined much earlier and
         # with better precision.
         if "crypto" in subset_name.lower():
-            self.log.info("Skipping crypto passes.")
+            self.log.debug("Skipping crypto passes.")
             return

         filtered_repres = self._get_filtered_repres(instance)
         if not filtered_repres:
-            self.log.info((
-                "Instance don't have representations"
-                " that can be used as source for thumbnail. Skipping"
-            ))
+            self.log.info(
+                "Instance doesn't have representations that can be used "
+                "as source for thumbnail. Skipping thumbnail extraction."
+            )
             return

         # Create temp directory for thumbnail

@@ -107,10 +107,10 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         # oiiotool isn't available
         if not thumbnail_created:
             if oiio_supported:
-                self.log.info((
+                self.log.debug(
                     "Converting with FFMPEG because input"
                     " can't be read by OIIO."
-                ))
+                )

             thumbnail_created = self.create_thumbnail_ffmpeg(
                 full_input_path, full_output_path

@@ -165,8 +165,8 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
                 continue

             if not repre.get("files"):
-                self.log.info((
-                    "Representation \"{}\" don't have files. Skipping"
+                self.log.debug((
+                    "Representation \"{}\" doesn't have files. Skipping"
                 ).format(repre["name"]))
                 continue

@@ -174,7 +174,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         return filtered_repres

     def create_thumbnail_oiio(self, src_path, dst_path):
-        self.log.info("Extracting thumbnail {}".format(dst_path))
+        self.log.debug("Extracting thumbnail with OIIO: {}".format(dst_path))
         oiio_cmd = get_oiio_tool_args(
             "oiiotool",
             "-a", src_path,

@@ -192,7 +192,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
         return False

     def create_thumbnail_ffmpeg(self, src_path, dst_path):
-        self.log.info("outputting {}".format(dst_path))
+        self.log.debug("Extracting thumbnail with FFMPEG: {}".format(dst_path))

         ffmpeg_path_args = get_ffmpeg_tool_args("ffmpeg")
         ffmpeg_args = self.ffmpeg_args or {}

@@ -49,7 +49,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):

         # Check if already has thumbnail created
         if self._instance_has_thumbnail(instance):
-            self.log.info("Thumbnail representation already present.")
+            self.log.debug("Thumbnail representation already present.")
             return

         dst_filepath = self._create_thumbnail(

@@ -98,7 +98,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         thumbnail_created = False
         oiio_supported = is_oiio_supported()

-        self.log.info("Thumbnail source: {}".format(thumbnail_source))
+        self.log.debug("Thumbnail source: {}".format(thumbnail_source))
         src_basename = os.path.basename(thumbnail_source)
         dst_filename = os.path.splitext(src_basename)[0] + "_thumb.jpg"
         full_output_path = os.path.join(dst_staging, dst_filename)

@@ -115,10 +115,10 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         # oiiotool isn't available
         if not thumbnail_created:
             if oiio_supported:
-                self.log.info((
+                self.log.info(
                     "Converting with FFMPEG because input"
                     " can't be read by OIIO."
-                ))
+                )

             thumbnail_created = self.create_thumbnail_ffmpeg(
                 thumbnail_source, full_output_path

@@ -143,14 +143,14 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
         return False

     def create_thumbnail_oiio(self, src_path, dst_path):
-        self.log.info("outputting {}".format(dst_path))
+        self.log.debug("Outputting thumbnail with OIIO: {}".format(dst_path))
         oiio_cmd = get_oiio_tool_args(
             "oiiotool",
             "-a", src_path,
             "--ch", "R,G,B",
             "-o", dst_path
         )
-        self.log.info("Running: {}".format(" ".join(oiio_cmd)))
+        self.log.debug("Running: {}".format(" ".join(oiio_cmd)))
         try:
             run_subprocess(oiio_cmd, logger=self.log)
             return True

@@ -173,7 +173,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
             dst_path
         )

-        self.log.info("Running: {}".format(" ".join(ffmpeg_cmd)))
+        self.log.debug("Running: {}".format(" ".join(ffmpeg_cmd)))
         try:
             run_subprocess(ffmpeg_cmd, logger=self.log)
             return True

@@ -36,7 +36,7 @@ class ExtractTrimVideoAudio(publish.Extractor):

         # get staging dir
         staging_dir = self.staging_dir(instance)
-        self.log.info("Staging dir set to: `{}`".format(staging_dir))
+        self.log.debug("Staging dir set to: `{}`".format(staging_dir))

         # Generate mov file.
         fps = instance.data["fps"]

@@ -59,7 +59,7 @@ class ExtractTrimVideoAudio(publish.Extractor):
             extensions = [output_file_type]

         for ext in extensions:
-            self.log.info("Processing ext: `{}`".format(ext))
+            self.log.debug("Processing ext: `{}`".format(ext))

             if not ext.startswith("."):
                 ext = "." + ext

@@ -98,7 +98,7 @@ class ExtractTrimVideoAudio(publish.Extractor):
             ffmpeg_args.append(clip_trimed_path)

             joined_args = " ".join(ffmpeg_args)
-            self.log.info(f"Processing: {joined_args}")
+            self.log.debug(f"Processing: {joined_args}")
             run_subprocess(
                 ffmpeg_args, logger=self.log
             )

@@ -155,13 +155,13 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

         # Instance should be integrated on a farm
         if instance.data.get("farm"):
-            self.log.info(
+            self.log.debug(
                 "Instance is marked to be processed on farm. Skipping")
             return

         # Instance is marked to not get integrated
         if not instance.data.get("integrate", True):
-            self.log.info("Instance is marked to skip integrating. Skipping")
+            self.log.debug("Instance is marked to skip integrating. Skipping")
             return

         filtered_repres = self.filter_representations(instance)

@@ -306,7 +306,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         # increase if the file transaction takes a long time.
         op_session.commit()

-        self.log.info("Subset {subset[name]} and Version {version[name]} "
+        self.log.info("Subset '{subset[name]}' and Version {version[name]} "
                       "written to database..".format(subset=subset,
                                                      version=version))

@@ -275,10 +275,10 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
                 backup_hero_publish_dir = _backup_hero_publish_dir
                 break
             except Exception:
-                self.log.info((
+                self.log.info(
                     "Could not remove previous backup folder."
-                    " Trying to add index to folder name"
-                ))
+                    " Trying to add index to folder name."
+                )

                 _backup_hero_publish_dir = (
                     backup_hero_publish_dir + str(idx)

@@ -41,7 +41,9 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin):

     def process(self, context):
         if AYON_SERVER_ENABLED:
-            self.log.info("AYON is enabled. Skipping v3 thumbnail integration")
+            self.log.debug(
+                "AYON is enabled. Skipping v3 thumbnail integration"
+            )
             return

         # Filter instances which can be used for integration

@@ -74,14 +76,14 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin):

         thumbnail_template = anatomy.templates["publish"]["thumbnail"]
         if not thumbnail_template:
-            self.log.info("Thumbnail template is not filled. Skipping.")
+            self.log.debug("Thumbnail template is not filled. Skipping.")
             return

         if (
             not thumbnail_root
             and thumbnail_root_format_key in thumbnail_template
         ):
-            self.log.warning(("{} is not set. Skipping.").format(env_key))
+            self.log.warning("{} is not set. Skipping.".format(env_key))
             return

         # Collect verion ids from all filtered instance

@@ -35,13 +35,13 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):

     def process(self, context):
         if not AYON_SERVER_ENABLED:
-            self.log.info("AYON is not enabled. Skipping")
+            self.log.debug("AYON is not enabled. Skipping")
             return

         # Filter instances which can be used for integration
         filtered_instance_items = self._prepare_instances(context)
         if not filtered_instance_items:
-            self.log.info(
+            self.log.debug(
                 "All instances were filtered. Thumbnail integration skipped."
             )
             return

@@ -110,7 +110,7 @@ class IntegrateThumbnailsAYON(pyblish.api.ContextPlugin):

             # Skip instance if thumbnail path is not available for it
             if not thumbnail_path:
-                self.log.info((
+                self.log.debug((
                     "Skipping thumbnail integration for instance \"{}\"."
                     " Instance and context"
                     " thumbnail paths are not available."

@@ -22,11 +22,11 @@ class ValidateAssetDocs(pyblish.api.InstancePlugin):
             return

         if instance.data.get("assetEntity"):
-            self.log.info("Instance has set asset document in its data.")
+            self.log.debug("Instance has set asset document in its data.")

         elif instance.data.get("newAssetPublishing"):
             # skip if it is editorial
-            self.log.info("Editorial instance is no need to check...")
+            self.log.debug("Editorial instance has no need to check...")

         else:
             raise PublishValidationError((

@@ -56,7 +56,7 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
                 }
                 continue

-            self.log.info("correct asset: {}".format(asset))
+            self.log.debug("correct asset: {}".format(asset))

         if assets_missing_name:
             wrong_names = {}
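The practical effect of the demotion can be reproduced with plain logging: a report that displays records at INFO and above hides the messages moved to DEBUG unless someone deliberately lowers the threshold. A minimal standalone illustration (the logger name is an example, not taken from ayon-core):

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("publish.ExamplePlugin")

# Demoted message: hidden at the default INFO threshold.
log.debug("Filling stagingDir...")
# Artist-facing message: still shown in the report.
log.info("Instance is marked to skip integrating. Skipping")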