Fix CG render publishing
Eallin production changes cherry-picked and merged with the current develop branch
parent d34be56dac
commit 82ac325448
4 changed files with 89 additions and 41 deletions
@@ -132,6 +132,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
        root_override = data.get("root")
        frame_start = int(data.get("frameStart"))
        frame_end = int(data.get("frameEnd"))
        subset = data.get("subset")

        if root_override:
            if os.path.isabs(root_override):
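The root override branch above distinguishes absolute from relative overrides. A minimal sketch of that resolution logic as a standalone function; the resolve_staging_root name and base_dir argument are illustrative assumptions, not names from the plugin:

    import os


    def resolve_staging_root(root_override, base_dir):
        """Illustrative helper: resolve the publish root from a payload.

        An absolute override wins, a relative override is joined onto the
        base directory, and no override falls back to the base directory.
        """
        if root_override:
            if os.path.isabs(root_override):
                return os.path.normpath(root_override)
            return os.path.normpath(os.path.join(base_dir, root_override))
        return base_dir


    # Hypothetical usage:
    print(resolve_staging_root("renders/sh010", "/mnt/projects/demo"))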
@@ -162,11 +163,11 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
            resolution_height = instance.get("resolutionHeight", 1080)
            lut_path = instance.get("lutPath", None)
            baked_mov_path = instance.get("bakeRenderPath")
            subset = instance.get("subset")
            families_data = instance.get("families")
            slate_frame = instance.get("slateFrame")
            version = instance.get("version")

        else:
            # Search in directory
            data = dict()
@@ -200,6 +201,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
            if data.get("user"):
                context.data["user"] = data["user"]

            if data.get("version"):
                version = data.get("version")

            # Get family from the data
            families = data.get("families", ["render"])
            if "render" not in families:
@@ -274,6 +278,9 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                    baked_mov_path))
            families.append("review")

            if session['AVALON_APP'] == "maya":
                families.append("review")

            self.log.info(
                "Adding representations to subset {}".format(
                    subset))
@@ -399,7 +406,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                 "source": data.get("source", ""),
                 "pixelAspect": pixel_aspect,
                 "resolutionWidth": resolution_width,
-                "resolutionHeight": resolution_height
+                "resolutionHeight": resolution_height,
+                "version": version
             }
         )
         if lut_path:
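The hunk above adds a trailing comma to resolutionHeight so a new "version" key can follow it. A small sketch of the resulting dict; the build_instance_data wrapper and the fallback version of 1 are illustrative assumptions:

    def build_instance_data(data, pixel_aspect, resolution_width,
                            resolution_height):
        """Illustrative: assemble the per-instance data the collector fills in."""
        # Assumed fallback of 1 when the payload carries no version.
        version = data.get("version", 1)
        return {
            "source": data.get("source", ""),
            "pixelAspect": pixel_aspect,
            "resolutionWidth": resolution_width,
            "resolutionHeight": resolution_height,
            "version": version,
        }


    # Hypothetical usage:
    print(build_instance_data({"source": "shot.nk", "version": 3}, 1.0, 1920, 1080))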
@@ -421,6 +429,16 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                 "tags": ["review"],
             }
             instance.data["representations"].append(representation)
+
+            # temporary ... allow only beauty on ftrack
+            if session['AVALON_APP'] == "maya":
+                AOV_filter = ['beauty']
+                for aov in AOV_filter:
+                    if aov not in instance.data['subset']:
+                        instance.data['families'].remove('review')
+                        instance.data['families'].remove('ftrack')
+                        representation["tags"].remove('review')
+
         self.log.debug(
             "__ representations {}".format(
                 instance.data["representations"]))
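The temporary beauty-only filter above removes 'review' and 'ftrack' in place and assumes those entries exist. A hedged sketch of the same rule as a testable helper; restrict_review_to_beauty and aov_filter are hypothetical names, not the plugin's API:

    def restrict_review_to_beauty(subset, families, tags, aov_filter=("beauty",)):
        """Illustrative: keep review/ftrack only for whitelisted AOVs."""
        if not any(aov in subset for aov in aov_filter):
            for family in ("review", "ftrack"):
                if family in families:
                    families.remove(family)
            if "review" in tags:
                tags.remove("review")
        return families, tags


    # Hypothetical usage: a specular AOV keeps publishing but drops the review upload.
    print(restrict_review_to_beauty(
        "renderMain_specular", ["render", "review", "ftrack"], ["review"]))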
@@ -20,6 +20,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
    hosts = ["shell"]
    order = pyblish.api.ExtractorOrder
    families = ["imagesequence", "render", "write", "source"]
    enabled = False

    def process(self, instance):
        start = instance.data.get("frameStart")
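For readers less familiar with Pyblish, a minimal sketch of an extractor declaring the same class-level settings as ExtractJpegEXR above; the ExampleExtractor name and its log line are illustrative only:

    import pyblish.api


    class ExampleExtractor(pyblish.api.InstancePlugin):
        """Illustrative skeleton mirroring the class-level settings above."""

        order = pyblish.api.ExtractorOrder
        families = ["imagesequence", "render", "write", "source"]
        hosts = ["shell"]
        # Disabled by default; a project config has to switch it on.
        enabled = False

        def process(self, instance):
            self.log.info("Processing {}".format(instance.data.get("subset")))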
@@ -28,51 +29,74 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
        collected_frames = os.listdir(stagingdir)
        collections, remainder = clique.assemble(collected_frames)

        input_file = (
            collections[0].format('{head}{padding}{tail}') % start
        )
        full_input_path = os.path.join(stagingdir, input_file)
        self.log.info("input {}".format(full_input_path))
        self.log.info("subset {}".format(instance.data['subset']))
        if 'crypto' in instance.data['subset']:
            return

        filename = collections[0].format('{head}')
        if not filename.endswith('.'):
            filename += "."
        jpegFile = filename + "jpg"
        full_output_path = os.path.join(stagingdir, jpegFile)
        # get representation and loop them
        representations = instance.data["representations"]

        self.log.info("output {}".format(full_output_path))
        # filter out mov and img sequences
        representations_new = representations[:]

        config_data = instance.context.data['output_repre_config']
        for repre in representations:
            self.log.debug(repre)
            if 'review' not in repre['tags']:
                return

            proj_name = os.environ.get('AVALON_PROJECT', '__default__')
            profile = config_data.get(proj_name, config_data['__default__'])
            input_file = repre['files'][0]

            jpeg_items = []
            jpeg_items.append(
                os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
            # override file if already exists
            jpeg_items.append("-y")
            # use same input args like with mov
            jpeg_items.extend(profile.get('input', []))
            # input file
            jpeg_items.append("-i {}".format(full_input_path))
            # output file
            jpeg_items.append(full_output_path)
            # input_file = (
            #     collections[0].format('{head}{padding}{tail}') % start
            # )
            full_input_path = os.path.join(stagingdir, input_file)
            self.log.info("input {}".format(full_input_path))

            subprocess_jpeg = " ".join(jpeg_items)
            filename = os.path.splitext(input_file)[0]
            if not filename.endswith('.'):
                filename += "."
            jpegFile = filename + "jpg"
            full_output_path = os.path.join(stagingdir, jpegFile)

            # run subprocess
            self.log.debug("{}".format(subprocess_jpeg))
            pype.api.subprocess(subprocess_jpeg)
            self.log.info("output {}".format(full_output_path))

            if "representations" not in instance.data:
                instance.data["representations"] = []
            config_data = instance.context.data['output_repre_config']

            representation = {
                'name': 'jpg',
                'ext': 'jpg',
                'files': jpegFile,
                "stagingDir": stagingdir,
                "thumbnail": True
            }
            instance.data["representations"].append(representation)
            proj_name = os.environ.get('AVALON_PROJECT', '__default__')
            profile = config_data.get(proj_name, config_data['__default__'])

            jpeg_items = []
            jpeg_items.append(
                os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
            # override file if already exists
            jpeg_items.append("-y")
            # use same input args like with mov
            jpeg_items.extend(profile.get('input', []))
            # input file
            jpeg_items.append("-i {}".format(full_input_path))
            # output file
            jpeg_items.append(full_output_path)

            subprocess_jpeg = " ".join(jpeg_items)

            # run subprocess
            self.log.debug("{}".format(subprocess_jpeg))
            pype.api.subprocess(subprocess_jpeg)

            if "representations" not in instance.data:
                instance.data["representations"] = []

            representation = {
                'name': 'jpg',
                'ext': 'jpg',
                'files': jpegFile,
                "stagingDir": stagingdir,
                "thumbnail": True,
                "tags": ['thumbnail']
            }

            # adding representation
            self.log.debug("Adding: {}".format(representation))
            representations_new.append(representation)

        instance.data["representations"] = representations_new
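The reworked process body above assembles an ffmpeg call per review representation and runs it through pype.api.subprocess as a single space-joined string. A hedged sketch of the same command assembly using the standard library's subprocess with an argument list; the function name and extra_input_args parameter are assumptions, not the plugin's API:

    import os
    import subprocess


    def extract_single_jpeg(staging_dir, input_file, extra_input_args=None):
        """Illustrative: write one jpg next to a frame from the sequence."""
        ffmpeg = os.path.join(os.environ.get("FFMPEG_PATH", ""), "ffmpeg")
        full_input_path = os.path.join(staging_dir, input_file)
        jpeg_file = os.path.splitext(input_file)[0] + ".jpg"
        full_output_path = os.path.join(staging_dir, jpeg_file)

        args = [ffmpeg, "-y"]                # overwrite an existing jpg
        args.extend(extra_input_args or [])  # per-project profile input args
        args.extend(["-i", full_input_path, full_output_path])

        subprocess.check_call(args)
        return jpeg_file

Passing the arguments as a list also avoids the quoting problems a space-joined command string can hit when staging paths contain spaces.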
@@ -46,6 +46,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
             if repre['ext'] in self.ext_filter:
                 tags = repre.get("tags", [])
 
+                if "thumbnail" in tags:
+                    continue
+
                 self.log.info("Try repre: {}".format(repre))
 
                 if "review" in tags:
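A hedged sketch of the gate added above, pulled out as a plain function so the thumbnail skip is easy to exercise on its own; should_process_for_review and its arguments are illustrative names, not part of ExtractReview:

    def should_process_for_review(repre, ext_filter):
        """Illustrative: thumbnails are never transcoded, and only
        review-tagged representations go further."""
        if repre.get("ext") not in ext_filter:
            return False
        tags = repre.get("tags", [])
        if "thumbnail" in tags:
            return False
        return "review" in tags


    # Hypothetical usage: the jpg thumbnail from ExtractJpegEXR is skipped.
    print(should_process_for_review({"ext": "jpg", "tags": ["thumbnail"]},
                                    ["jpg", "exr", "mov"]))  # False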
@@ -341,6 +341,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             }
         }
 
+        if api.Session["AVALON_APP"] == "nuke":
+            metadata['subset'] = subset
+
         if submission_type == "muster":
             ftrack = {
                 "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
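A hedged sketch of the metadata change above: when the submitting host is Nuke, the subset name rides along in the farm-job metadata so the farm-side publish can resolve it. The build_job_metadata helper and its arguments are assumptions for illustration only:

    import os


    def build_job_metadata(base_metadata, host_app, subset):
        """Illustrative: add the subset only for Nuke submissions, as above."""
        metadata = dict(base_metadata)
        if host_app == "nuke":
            metadata["subset"] = subset
        return metadata


    # Hypothetical usage with values a submitter might have at hand:
    metadata = build_job_metadata(
        {"user": os.environ.get("USER", "unknown")}, "nuke", "renderMain")
    print(metadata)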