Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Merge branch 'master' into feature/PYPE-95-nks-load-subset-to-timeline
Commit a7bde6004e: 13 changed files with 135 additions and 153 deletions

@@ -236,6 +236,7 @@ class SyncEntitiesFactory:
         " from TypedContext where project_id is \"{}\""
     )
     ignore_custom_attr_key = "avalon_ignore_sync"
+    ignore_entity_types = ["milestone"]
 
     report_splitter = {"type": "label", "value": "---"}
 
@@ -366,7 +367,10 @@ class SyncEntitiesFactory:
             parent_id = entity["parent_id"]
             entity_type = entity.entity_type
             entity_type_low = entity_type.lower()
-            if entity_type_low == "task":
+            if entity_type_low in self.ignore_entity_types:
+                continue
+
+            elif entity_type_low == "task":
                 entities_dict[parent_id]["tasks"].append(entity["name"])
                 continue
 
pype/lib.py (67)

@@ -361,23 +361,7 @@ def _get_host_name():
 
 
 def get_asset(asset_name=None):
-    entity_data_keys_from_project_when_miss = [
-        "frameStart", "frameEnd", "handleStart", "handleEnd", "fps",
-        "resolutionWidth", "resolutionHeight"
-    ]
-
-    entity_keys_from_project_when_miss = []
-
-    alternatives = {
-        "handleStart": "handles",
-        "handleEnd": "handles"
-    }
-
-    defaults = {
-        "handleStart": 0,
-        "handleEnd": 0
-    }
-
     """ Returning asset document from database """
     if not asset_name:
         asset_name = avalon.api.Session["AVALON_ASSET"]
 
@@ -385,57 +369,10 @@ def get_asset(asset_name=None):
         "name": asset_name,
         "type": "asset"
     })
 
     if not asset_document:
         raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))
 
-    project_document = io.find_one({"type": "project"})
-
-    for key in entity_data_keys_from_project_when_miss:
-        if asset_document["data"].get(key):
-            continue
-
-        value = project_document["data"].get(key)
-        if value is not None or key not in alternatives:
-            asset_document["data"][key] = value
-            continue
-
-        alt_key = alternatives[key]
-        value = asset_document["data"].get(alt_key)
-        if value is not None:
-            asset_document["data"][key] = value
-            continue
-
-        value = project_document["data"].get(alt_key)
-        if value:
-            asset_document["data"][key] = value
-            continue
-
-        if key in defaults:
-            asset_document["data"][key] = defaults[key]
-
-    for key in entity_keys_from_project_when_miss:
-        if asset_document.get(key):
-            continue
-
-        value = project_document.get(key)
-        if value is not None or key not in alternatives:
-            asset_document[key] = value
-            continue
-
-        alt_key = alternatives[key]
-        value = asset_document.get(alt_key)
-        if value:
-            asset_document[key] = value
-            continue
-
-        value = project_document.get(alt_key)
-        if value:
-            asset_document[key] = value
-            continue
-
-        if key in defaults:
-            asset_document[key] = defaults[key]
-
     return asset_document
 
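The block removed from get_asset() implemented a fall-through lookup: a key is taken from the asset data, then from the project data, then from an alternative key (e.g. "handleStart" falling back to "handles"), then from a hard-coded default. A minimal standalone sketch of that pattern, using plain dicts rather than the pype/avalon documents:

    def resolve_value(key, asset_data, project_data,
                      alternatives=None, defaults=None):
        """Return a value for `key`, falling back through several sources.

        Order: asset data -> project data -> alternative key on the asset,
        then on the project -> default. Sketch of the removed get_asset()
        logic; inputs here are plain dicts, not database documents.
        """
        alternatives = alternatives or {}
        defaults = defaults or {}

        if asset_data.get(key) is not None:
            return asset_data[key]

        value = project_data.get(key)
        if value is not None or key not in alternatives:
            return value

        alt_key = alternatives[key]
        for source in (asset_data, project_data):
            value = source.get(alt_key)
            if value is not None:
                return value

        return defaults.get(key)


    # Example: handleStart missing on the asset falls back to "handles".
    asset = {"frameStart": 1001, "handles": 10}
    project = {"fps": 25}
    print(resolve_value("handleStart", asset, project,
                        alternatives={"handleStart": "handles"},
                        defaults={"handleStart": 0}))  # -> 10
    print(resolve_value("fps", asset, project))        # -> 25
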
@@ -519,11 +519,6 @@ class WorkfileSettings(object):
         self.data = kwargs
 
     def get_nodes(self, nodes=None, nodes_filter=None):
-        # filter out only dictionaries for node creation
-        #
-        # print("\n\n")
-        # pprint(self._nodes)
-        #
 
         if not isinstance(nodes, list) and not isinstance(nodes_filter, list):
             return [n for n in nuke.allNodes()]
 
@@ -791,6 +786,8 @@ class WorkfileSettings(object):
             return
         data = self._asset_entity["data"]
 
+        log.debug("__ asset data: `{}`".format(data))
+
         missing_cols = []
         check_cols = ["fps", "frameStart", "frameEnd",
                       "handleStart", "handleEnd"]
 
@@ -18,7 +18,7 @@ def open(filepath):
 class Openfile(api.Loader):
     """Open Image Sequence with system default"""
 
-    families = ["write"]
+    families = ["render2d"]
     representations = ["*"]
 
     label = "Open"
 
@@ -158,7 +158,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
        os.environ.update(session)
        instance = metadata.get("instance")
        if instance:
            instance_family = instance.get("family")
            pixel_aspect = instance.get("pixelAspect", 1)
            resolution_width = instance.get("resolutionWidth", 1920)
            resolution_height = instance.get("resolutionHeight", 1080)

@@ -168,7 +167,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
            slate_frame = instance.get("slateFrame")
            version = instance.get("version")

        else:
            # Search in directory
            data = dict()

@@ -211,14 +209,13 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):

        # Get family from the data
        families = data.get("families", ["render"])
        if "render" not in families:
            families.append("render")
        if "ftrack" not in families:
            families.append("ftrack")
        if "write" in instance_family:
            families.append("write")
        if families_data and "render2d" in families_data:
            families.append("render2d")
        if families_data and "slate" in families_data:
            families.append("slate")
            families.append("slate.farm")

        if data.get("attachTo"):
            # we need to attach found collections to existing

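The hunk above rebuilds the instance family list around the new render2d and slate/slate.farm families. A simplified sketch of one plausible reading of those rules after the change (the exact branching in CollectRenderedFrames, e.g. around the old "write" family, differs):

    def build_families(data, families_data):
        """Collect publish families for a rendered sequence.

        Simplified sketch of the tagging rules in the hunk above;
        not the plugin's actual API.
        """
        families = list(data.get("families", ["render"]))

        for required in ("render", "ftrack"):
            if required not in families:
                families.append(required)

        if families_data and "render2d" in families_data:
            families.append("render2d")

        if families_data and "slate" in families_data:
            families.append("slate")
            families.append("slate.farm")

        return families


    print(build_families({}, ["render2d", "slate"]))
    # -> ['render', 'ftrack', 'render2d', 'slate', 'slate.farm']
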
@@ -334,7 +331,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                 "stagingDir": root,
                 "anatomy_template": "render",
                 "fps": fps,
-                "tags": ["review"] if not baked_mov_path else [],
+                "tags": ["review"] if not baked_mov_path else ["thumb-nuke"],
             }
             instance.data["representations"].append(
                 representation)
 
@@ -388,8 +385,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
 
             # If no start or end frame provided, get it from collection
             indices = list(collection.indexes)
-            start = data.get("frameStart", indices[0])
-            end = data.get("frameEnd", indices[-1])
+            start = int(data.get("frameStart", indices[0]))
+            end = int(data.get("frameEnd", indices[-1]))
 
             ext = list(collection)[0].split(".")[-1]
 
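The last hunk coerces frameStart/frameEnd to int, falling back to the first and last collected index when the metadata does not provide them. A small sketch of that fallback, using a plain list of frame numbers instead of a clique collection:

    def frame_range(data, indices):
        """Resolve an integer frame range.

        Prefers explicit frameStart/frameEnd from `data` (which may arrive
        as strings in metadata) and falls back to the first/last collected
        index, as in the hunk above.
        """
        indices = sorted(indices)
        start = int(data.get("frameStart", indices[0]))
        end = int(data.get("frameEnd", indices[-1]))
        return start, end


    print(frame_range({"frameStart": "1001"}, [1001, 1002, 1003]))  # (1001, 1003)
    print(frame_range({}, [17, 18, 19, 20]))                        # (17, 20)
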
@@ -32,6 +32,10 @@ class ExtractBurnin(pype.api.Extractor):
         frame_end = int(instance.data.get("frameEnd") or 1)
         duration = frame_end - frame_start + 1
 
+        if "slate.farm" in instance.data["families"]:
+            frame_start += 1
+            duration -= 1
+
         prep_data = {
             "username": instance.context.data['user'],
             "asset": os.environ['AVALON_ASSET'],
 
@@ -48,22 +52,6 @@ class ExtractBurnin(pype.api.Extractor):
         datetime_data = instance.context.data.get("datetimeData") or {}
         prep_data.update(datetime_data)
 
-        slate_frame_start = frame_start
-        slate_frame_end = frame_end
-        slate_duration = duration
-
-        # exception for slate workflow
-        if "slate" in instance.data["families"]:
-            slate_frame_start = frame_start - 1
-            slate_frame_end = frame_end
-            slate_duration = slate_frame_end - slate_frame_start + 1
-
-        prep_data.update({
-            "slate_frame_start": slate_frame_start,
-            "slate_frame_end": slate_frame_end,
-            "slate_duration": slate_duration
-        })
-
         # Update data with template data
         template_data = instance.data.get("assumedTemplateData") or {}
         prep_data.update(template_data)
 
@@ -111,6 +99,26 @@ class ExtractBurnin(pype.api.Extractor):
             filled_anatomy = anatomy.format_all(_prep_data)
             _prep_data["anatomy"] = filled_anatomy.get_solved()
 
+            # dealing with slates
+            slate_frame_start = frame_start
+            slate_frame_end = frame_end
+            slate_duration = duration
+
+            # exception for slate workflow
+            if ("slate" in instance.data["families"]):
+                if "slate-frame" in repre.get("tags", []):
+                    slate_frame_start = frame_start - 1
+                    slate_frame_end = frame_end
+                    slate_duration = duration + 1
+
+            self.log.debug("__1 slate_frame_start: {}".format(slate_frame_start))
+
+            _prep_data.update({
+                "slate_frame_start": slate_frame_start,
+                "slate_frame_end": slate_frame_end,
+                "slate_duration": slate_duration
+            })
+
             burnin_data = {
                 "input": full_movie_path.replace("\\", "/"),
                 "codec": repre.get("codec", []),
 
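The burnin changes shift the frame range by one frame when a slate is involved: a "slate.farm" family means the slate was rendered on the farm, so the burnin range starts one frame later, while a representation tagged "slate-frame" carries an extra slate frame before frame_start. A standalone sketch of that arithmetic with plain integers (hypothetical helper, not the plugin's API):

    def slate_ranges(frame_start, frame_end, families, repre_tags):
        """Work out burnin and slate frame ranges.

        Mirrors the arithmetic in the ExtractBurnin hunks above;
        illustrative sketch only.
        """
        duration = frame_end - frame_start + 1

        # slate already rendered on the farm: drop it from the burnin range
        if "slate.farm" in families:
            frame_start += 1
            duration -= 1

        slate_frame_start = frame_start
        slate_duration = duration
        # this representation carries the slate frame one frame earlier
        if "slate" in families and "slate-frame" in repre_tags:
            slate_frame_start = frame_start - 1
            slate_duration = duration + 1

        return frame_start, duration, slate_frame_start, slate_duration


    print(slate_ranges(1001, 1010, ["render", "slate", "slate.farm"], ["slate-frame"]))
    # -> (1002, 9, 1001, 10)
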
@@ -19,7 +19,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
     label = "Extract Jpeg EXR"
     hosts = ["shell"]
     order = pyblish.api.ExtractorOrder
-    families = ["imagesequence", "render", "write", "source"]
+    families = ["imagesequence", "render", "render2d", "source"]
     enabled = False
 
     def process(self, instance):
 
@@ -41,62 +41,63 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
 
         for repre in representations:
             self.log.debug(repre)
-            if 'review' not in repre['tags']:
-                return
+            if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
+                if not isinstance(repre['files'], list):
+                    return
 
-            input_file = repre['files'][0]
+                input_file = repre['files'][0]
 
-            # input_file = (
-            #     collections[0].format('{head}{padding}{tail}') % start
-            # )
-            full_input_path = os.path.join(stagingdir, input_file)
-            self.log.info("input {}".format(full_input_path))
+                # input_file = (
+                #     collections[0].format('{head}{padding}{tail}') % start
+                # )
+                full_input_path = os.path.join(stagingdir, input_file)
+                self.log.info("input {}".format(full_input_path))
 
-            filename = os.path.splitext(input_file)[0]
-            if not filename.endswith('.'):
-                filename += "."
-            jpeg_file = filename + "jpg"
-            full_output_path = os.path.join(stagingdir, jpeg_file)
+                filename = os.path.splitext(input_file)[0]
+                if not filename.endswith('.'):
+                    filename += "."
+                jpeg_file = filename + "jpg"
+                full_output_path = os.path.join(stagingdir, jpeg_file)
 
-            self.log.info("output {}".format(full_output_path))
+                self.log.info("output {}".format(full_output_path))
 
-            config_data = instance.context.data['output_repre_config']
+                config_data = instance.context.data['output_repre_config']
 
-            proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-            profile = config_data.get(proj_name, config_data['__default__'])
+                proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+                profile = config_data.get(proj_name, config_data['__default__'])
 
-            jpeg_items = []
-            jpeg_items.append(
-                os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
-            # override file if already exists
-            jpeg_items.append("-y")
-            # use same input args like with mov
-            jpeg_items.extend(profile.get('input', []))
-            # input file
-            jpeg_items.append("-i {}".format(full_input_path))
-            # output file
-            jpeg_items.append(full_output_path)
+                jpeg_items = []
+                jpeg_items.append(
+                    os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
+                # override file if already exists
+                jpeg_items.append("-y")
+                # use same input args like with mov
+                jpeg_items.extend(profile.get('input', []))
+                # input file
+                jpeg_items.append("-i {}".format(full_input_path))
+                # output file
+                jpeg_items.append(full_output_path)
 
-            subprocess_jpeg = " ".join(jpeg_items)
+                subprocess_jpeg = " ".join(jpeg_items)
 
-            # run subprocess
-            self.log.debug("{}".format(subprocess_jpeg))
-            pype.api.subprocess(subprocess_jpeg)
+                # run subprocess
+                self.log.debug("{}".format(subprocess_jpeg))
+                pype.api.subprocess(subprocess_jpeg)
 
-            if "representations" not in instance.data:
-                instance.data["representations"] = []
+                if "representations" not in instance.data:
+                    instance.data["representations"] = []
 
-            representation = {
-                'name': 'thumbnail',
-                'ext': 'jpg',
-                'files': jpeg_file,
-                "stagingDir": stagingdir,
-                "thumbnail": True,
-                "tags": ['thumbnail']
-            }
+                representation = {
+                    'name': 'thumbnail',
+                    'ext': 'jpg',
+                    'files': jpeg_file,
+                    "stagingDir": stagingdir,
+                    "thumbnail": True,
+                    "tags": ['thumbnail']
+                }
 
-            # adding representation
-            self.log.debug("Adding: {}".format(representation))
-            representations_new.append(representation)
+                # adding representation
+                self.log.debug("Adding: {}".format(representation))
+                representations_new.append(representation)
 
         instance.data["representations"] = representations_new
 
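The extractor above assembles an ffmpeg call as a list of string fragments joined into one command line. A minimal standalone sketch of the same idea with hypothetical paths; it assumes ffmpeg is on PATH rather than under FFMPEG_PATH, and passes the arguments as a list to subprocess instead of joining them into a shell string:

    import subprocess

    def extract_thumbnail(input_path, output_path, input_args=None):
        """Build and run an ffmpeg call that writes a single JPEG.

        Sketch of the command assembled in ExtractJpegEXR; paths and the
        extra input arguments are placeholders.
        """
        cmd = ["ffmpeg", "-y"]              # -y: overwrite existing output
        cmd += list(input_args or [])       # e.g. ["-loglevel", "error"]
        cmd += ["-i", input_path]           # first frame of the sequence
        cmd += ["-frames:v", "1", output_path]
        subprocess.check_call(cmd)

    # extract_thumbnail("renders/shot010.1001.exr", "renders/shot010.1001.jpg")
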
@@ -176,8 +176,8 @@ class LoadGizmoInputProcess(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            msg = "Please create Viewer node before you "
-            "run this action again"
+            msg = str("Please create Viewer node before you "
+                      "run this action again")
             self.log.error(msg)
             nuke.message(msg)
             return None
 
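The change in both loaders fixes a classic Python pitfall: without parentheses, the second string literal sits on its own line as a separate, unused expression statement, so the message ends mid-sentence. Adjacent string literals only concatenate when they belong to one expression; the str() wrapper is redundant, the grouping parentheses do the work. A tiny illustration:

    # Broken: the second literal is an unused expression statement.
    msg = "Please create Viewer node before you "
    "run this action again"
    print(msg)  # -> Please create Viewer node before you 

    # Fixed: implicit concatenation works once both literals share one expression.
    msg = ("Please create Viewer node before you "
           "run this action again")
    print(msg)  # -> Please create Viewer node before you run this action again
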
@@ -276,8 +276,8 @@ class LoadLutsInputProcess(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            msg = "Please create Viewer node before you "
-            "run this action again"
+            msg = str("Please create Viewer node before you "
+                      "run this action again")
             self.log.error(msg)
             nuke.message(msg)
             return None
 
@@ -41,6 +41,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         handle_end = instance.context.data["handleEnd"]
         first_frame = int(nuke.root()["first_frame"].getValue())
         last_frame = int(nuke.root()["last_frame"].getValue())
+        frame_length = int(
+            last_frame - first_frame + 1
+        )
 
         if node["use_limit"].getValue():
             handles = 0
 
@@ -82,8 +85,26 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             collected_frames = [f for f in os.listdir(output_dir)
                                 if ext in f]
             if collected_frames:
-                representation['frameStart'] = "%0{}d".format(
+                collected_frames_len = len(collected_frames)
+                frame_start_str = "%0{}d".format(
                     len(str(last_frame))) % first_frame
+                representation['frameStart'] = frame_start_str
+
+                # in case slate is expected and not yet rendered
+                self.log.debug("_ frame_length: {}".format(frame_length))
+                self.log.debug(
+                    "_ collected_frames_len: {}".format(
+                        collected_frames_len))
+                # this will only run if slate frame is not already
+                # rendered from previews publishes
+                if "slate" in instance.data["families"] \
+                        and (frame_length == collected_frames_len):
+                    frame_slate_str = "%0{}d".format(
+                        len(str(last_frame))) % (first_frame - 1)
+                    slate_frame = collected_frames[0].replace(
+                        frame_start_str, frame_slate_str)
+                    collected_frames.insert(0, slate_frame)
+
             representation['files'] = collected_frames
             instance.data["representations"].append(representation)
         except Exception:
 
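The new block synthesizes the slate frame's filename by swapping the zero-padded first-frame number for first_frame - 1 and prepending it to the collected files. A standalone sketch with plain lists (hypothetical filenames; it assumes the padded frame number appears verbatim in the first collected file):

    def prepend_slate_frame(collected_frames, first_frame, last_frame):
        """Insert the expected slate frame filename at the start of a sequence.

        Follows the hunk above: pad frame numbers to the width of the last
        frame, then derive the slate name from the first collected file.
        """
        width = len(str(last_frame))
        frame_start_str = "%0{}d".format(width) % first_frame
        frame_slate_str = "%0{}d".format(width) % (first_frame - 1)

        slate_frame = collected_frames[0].replace(frame_start_str, frame_slate_str)
        return [slate_frame] + collected_frames


    files = ["shot010.1001.exr", "shot010.1002.exr"]
    print(prepend_slate_frame(files, 1001, 1002))
    # -> ['shot010.1000.exr', 'shot010.1001.exr', 'shot010.1002.exr']
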
@@ -33,6 +33,7 @@ class ExtractSlateFrame(pype.api.Extractor):
         self.render_slate(instance)
 
     def render_slate(self, instance):
+        node_subset_name = instance.data.get("name", None)
         node = instance[0]  # group node
         self.log.info("Creating staging dir...")
 
@@ -47,6 +48,10 @@ class ExtractSlateFrame(pype.api.Extractor):
         self.log.info(
             "StagingDir `{0}`...".format(instance.data["stagingDir"]))
 
+        frame_length = int(
+            instance.data["frameEnd"] - instance.data["frameStart"] + 1
+        )
+
         temporary_nodes = []
         collection = instance.data.get("collection", None)
 
@@ -56,10 +61,16 @@ class ExtractSlateFrame(pype.api.Extractor):
             "{head}{padding}{tail}"))
         fhead = collection.format("{head}")
 
+        collected_frames_len = int(len(collection.indexes))
+
         # get first and last frame
         first_frame = min(collection.indexes) - 1
 
-        if "slate" in instance.data["families"]:
+        self.log.info('frame_length: {}'.format(frame_length))
+        self.log.info(
+            'len(collection.indexes): {}'.format(collected_frames_len)
+        )
+        if ("slate" in instance.data["families"]) \
+                and (frame_length != collected_frames_len):
             first_frame += 1
 
         last_frame = first_frame
 
@@ -103,6 +114,8 @@ class ExtractSlateFrame(pype.api.Extractor):
 
         # Render frames
         nuke.execute(write_node.name(), int(first_frame), int(last_frame))
+        # also render slate as sequence frame
+        nuke.execute(node_subset_name, int(first_frame), int(last_frame))
 
         self.log.debug(
             "slate frame path: {}".format(instance.data["slateFrame"]))
 
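Both ExtractSlateFrame and ValidateRenderedFrames compare the expected frame count (frameEnd - frameStart + 1) against the number of collected files to decide whether the extra slate frame is already on disk. A sketch of that check with plain integers (hypothetical helper, not the plugins' API):

    def slate_already_rendered(frame_start, frame_end, collected_count):
        """Return True if the extra slate frame is already on disk.

        The expected length covers frame_start..frame_end only; with a slate
        family, one additional frame (frame_start - 1) gets rendered, so a
        count of expected_length + 1 means the slate exists already. Sketch
        of the comparison used in the hunks above.
        """
        expected_length = frame_end - frame_start + 1
        return collected_count == expected_length + 1


    print(slate_already_rendered(1001, 1010, 10))  # False: slate still missing
    print(slate_already_rendered(1001, 1010, 11))  # True: slate frame included
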
@@ -76,7 +76,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
             'len(collection.indexes): {}'.format(collected_frames_len)
         )
 
-        if "slate" in instance.data["families"]:
+        if ("slate" in instance.data["families"]) \
+                and (frame_length != collected_frames_len):
             collected_frames_len -= 1
 
         assert (collected_frames_len == frame_length), (
 
@@ -30,9 +30,12 @@ class CollectClipTagFrameStart(api.InstancePlugin):
        except ValueError:
            if "source" in t_value:
                source_first = instance.data["sourceFirst"]
                if source_first == 0:
                    source_first = 1
                self.log.info("Start frame on `{0}`".format(source_first))
                source_in = instance.data["sourceIn"]
                handle_start = instance.data["handleStart"]
                start_frame = (source_first + source_in) - handle_start
                self.log.info("Start frame on `{0}`".format(source_in))
                start_frame = source_first + source_in

        instance.data["startingFrame"] = start_frame
        self.log.info("Start frame on `{0}` set to `{1}`".format(

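The hunk above shows two variants of the starting-frame computation: a plain source_first + source_in, and a handle-aware (source_first + source_in) - handle_start with sourceFirst normalized from 0 to 1. A small sketch of the handle-aware variant with plain integers (illustrative only):

    def starting_frame(source_first, source_in, handle_start=0):
        """Compute a clip's starting frame from its source range.

        Mirrors the handle-aware variant in the hunk above: a sourceFirst
        of 0 is treated as 1, and the head handle is subtracted so the
        start frame covers the handle range.
        """
        if source_first == 0:
            source_first = 1
        return (source_first + source_in) - handle_start


    print(starting_frame(0, 100, handle_start=10))  # -> 91
    print(starting_frame(1, 0))                     # -> 1
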