Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 13:24:54 +01:00)
Merge branch 'hotfix/PYPE-720-nks-and-nk-fixes'
Commit 789cef635e
7 changed files with 61 additions and 128 deletions
pype/lib.py (67 changed lines)
@@ -361,23 +361,7 @@ def _get_host_name():
 def get_asset(asset_name=None):
-    entity_data_keys_from_project_when_miss = [
-        "frameStart", "frameEnd", "handleStart", "handleEnd", "fps",
-        "resolutionWidth", "resolutionHeight"
-    ]
-
-    entity_keys_from_project_when_miss = []
-
-    alternatives = {
-        "handleStart": "handles",
-        "handleEnd": "handles"
-    }
-
-    defaults = {
-        "handleStart": 0,
-        "handleEnd": 0
-    }
-
     """ Returning asset document from database """
     if not asset_name:
         asset_name = avalon.api.Session["AVALON_ASSET"]
@@ -385,57 +369,10 @@ def get_asset(asset_name=None):
         "name": asset_name,
         "type": "asset"
     })

     if not asset_document:
         raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))

-    project_document = io.find_one({"type": "project"})
-
-    for key in entity_data_keys_from_project_when_miss:
-        if asset_document["data"].get(key):
-            continue
-
-        value = project_document["data"].get(key)
-        if value is not None or key not in alternatives:
-            asset_document["data"][key] = value
-            continue
-
-        alt_key = alternatives[key]
-        value = asset_document["data"].get(alt_key)
-        if value is not None:
-            asset_document["data"][key] = value
-            continue
-
-        value = project_document["data"].get(alt_key)
-        if value:
-            asset_document["data"][key] = value
-            continue
-
-        if key in defaults:
-            asset_document["data"][key] = defaults[key]
-
-    for key in entity_keys_from_project_when_miss:
-        if asset_document.get(key):
-            continue
-
-        value = project_document.get(key)
-        if value is not None or key not in alternatives:
-            asset_document[key] = value
-            continue
-
-        alt_key = alternatives[key]
-        value = asset_document.get(alt_key)
-        if value:
-            asset_document[key] = value
-            continue
-
-        value = project_document.get(alt_key)
-        if value:
-            asset_document[key] = value
-            continue
-
-        if key in defaults:
-            asset_document[key] = defaults[key]
-
     return asset_document
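The block removed above filled missing keys on the asset document from the project document, with per-key alternative names and defaults. A minimal standalone sketch of that fallback pattern, using the same key names but plain dicts instead of avalon/io documents, purely for illustration:

def fill_missing_asset_data(asset_data, project_data):
    """Illustrative only: resolve missing asset keys from project data."""
    keys = ["frameStart", "frameEnd", "handleStart", "handleEnd", "fps",
            "resolutionWidth", "resolutionHeight"]
    alternatives = {"handleStart": "handles", "handleEnd": "handles"}
    defaults = {"handleStart": 0, "handleEnd": 0}

    for key in keys:
        if asset_data.get(key):
            continue
        # try the project value, then the alternative key on asset and
        # project, and finally a hard-coded default
        for candidate in (project_data.get(key),
                          asset_data.get(alternatives.get(key)),
                          project_data.get(alternatives.get(key)),
                          defaults.get(key)):
            if candidate is not None:
                asset_data[key] = candidate
                break
    return asset_data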
@@ -519,11 +519,6 @@ class WorkfileSettings(object):
         self.data = kwargs

     def get_nodes(self, nodes=None, nodes_filter=None):
-        # filter out only dictionaries for node creation
-        #
-        # print("\n\n")
-        # pprint(self._nodes)
-        #
         if not isinstance(nodes, list) and not isinstance(nodes_filter, list):
             return [n for n in nuke.allNodes()]
@@ -791,6 +786,8 @@ class WorkfileSettings(object):
             return
         data = self._asset_entity["data"]

+        log.debug("__ asset data: `{}`".format(data))
+
         missing_cols = []
         check_cols = ["fps", "frameStart", "frameEnd",
                       "handleStart", "handleEnd"]
@@ -18,7 +18,7 @@ def open(filepath):
 class Openfile(api.Loader):
     """Open Image Sequence with system default"""

-    families = ["write"]
+    families = ["render2d"]
     representations = ["*"]

     label = "Open"
@@ -211,12 +211,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):

            # Get family from the data
            families = data.get("families", ["render"])
-            if "render" not in families:
-                families.append("render")
-            if "ftrack" not in families:
-                families.append("ftrack")
-            if "write" in instance_family:
-                families.append("write")
+            if families_data and "render2d" in families_data:
+                families.append("render2d")
+            if families_data and "slate" in families_data:
+                families.append("slate")
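For context (not part of the diff): in pyblish, the families appended here decide which downstream plugins pick the instance up, since each plugin declares its own families list and only processes matching instances. A hypothetical sketch of such a consumer:

import pyblish.api

class ValidateSlateFrame(pyblish.api.InstancePlugin):
    """Hypothetical validator that only runs on instances tagged 'slate'."""
    order = pyblish.api.ValidatorOrder
    families = ["slate"]

    def process(self, instance):
        self.log.info("'slate' family found on: {}".format(instance))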
@@ -334,7 +332,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                "stagingDir": root,
                "anatomy_template": "render",
                "fps": fps,
-                "tags": ["review"] if not baked_mov_path else [],
+                "tags": ["review"] if not baked_mov_path else ["thumb-nuke"],
            }
            instance.data["representations"].append(
                representation)
@@ -388,8 +386,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):

            # If no start or end frame provided, get it from collection
            indices = list(collection.indexes)
-            start = data.get("frameStart", indices[0])
-            end = data.get("frameEnd", indices[-1])
+            start = int(data.get("frameStart", indices[0]))
+            end = int(data.get("frameEnd", indices[-1]))

            ext = list(collection)[0].split(".")[-1]
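A small aside (an assumption, not stated in the diff): frame values coming from the asset document are often stored as floats, while clique collection indexes are ints, so the explicit int() keeps the two sources interchangeable. A tiny demonstration:

data = {"frameStart": 1001.0, "frameEnd": 1100.0}   # values as the DB may return them
indices = [1001, 1002, 1003]                        # collection indexes are ints

start = int(data.get("frameStart", indices[0]))     # 1001
end = int(data.get("frameEnd", indices[-1]))        # 1100
print(list(range(start, end + 1))[:3])              # range() would fail with float bounds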
@@ -19,7 +19,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
     label = "Extract Jpeg EXR"
     hosts = ["shell"]
     order = pyblish.api.ExtractorOrder
-    families = ["imagesequence", "render", "write", "source"]
+    families = ["imagesequence", "render", "render2d", "source"]
     enabled = False

     def process(self, instance):
@@ -41,62 +41,63 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
         for repre in representations:
             self.log.debug(repre)
-            if 'review' not in repre['tags']:
-                return
+            if 'review' in repre['tags'] or "thumb-nuke" in repre['tags']:
+                if not isinstance(repre['files'], list):
+                    return

-            input_file = repre['files'][0]
+                input_file = repre['files'][0]

-            # input_file = (
-            #     collections[0].format('{head}{padding}{tail}') % start
-            # )
-            full_input_path = os.path.join(stagingdir, input_file)
-            self.log.info("input {}".format(full_input_path))
+                # input_file = (
+                #     collections[0].format('{head}{padding}{tail}') % start
+                # )
+                full_input_path = os.path.join(stagingdir, input_file)
+                self.log.info("input {}".format(full_input_path))

-            filename = os.path.splitext(input_file)[0]
-            if not filename.endswith('.'):
-                filename += "."
-            jpeg_file = filename + "jpg"
-            full_output_path = os.path.join(stagingdir, jpeg_file)
+                filename = os.path.splitext(input_file)[0]
+                if not filename.endswith('.'):
+                    filename += "."
+                jpeg_file = filename + "jpg"
+                full_output_path = os.path.join(stagingdir, jpeg_file)

-            self.log.info("output {}".format(full_output_path))
+                self.log.info("output {}".format(full_output_path))

-            config_data = instance.context.data['output_repre_config']
+                config_data = instance.context.data['output_repre_config']

-            proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-            profile = config_data.get(proj_name, config_data['__default__'])
+                proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+                profile = config_data.get(proj_name, config_data['__default__'])

-            jpeg_items = []
-            jpeg_items.append(
-                os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
-            # override file if already exists
-            jpeg_items.append("-y")
-            # use same input args like with mov
-            jpeg_items.extend(profile.get('input', []))
-            # input file
-            jpeg_items.append("-i {}".format(full_input_path))
-            # output file
-            jpeg_items.append(full_output_path)
+                jpeg_items = []
+                jpeg_items.append(
+                    os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
+                # override file if already exists
+                jpeg_items.append("-y")
+                # use same input args like with mov
+                jpeg_items.extend(profile.get('input', []))
+                # input file
+                jpeg_items.append("-i {}".format(full_input_path))
+                # output file
+                jpeg_items.append(full_output_path)

-            subprocess_jpeg = " ".join(jpeg_items)
+                subprocess_jpeg = " ".join(jpeg_items)

-            # run subprocess
-            self.log.debug("{}".format(subprocess_jpeg))
-            pype.api.subprocess(subprocess_jpeg)
+                # run subprocess
+                self.log.debug("{}".format(subprocess_jpeg))
+                pype.api.subprocess(subprocess_jpeg)

-            if "representations" not in instance.data:
-                instance.data["representations"] = []
+                if "representations" not in instance.data:
+                    instance.data["representations"] = []

-            representation = {
-                'name': 'thumbnail',
-                'ext': 'jpg',
-                'files': jpeg_file,
-                "stagingDir": stagingdir,
-                "thumbnail": True,
-                "tags": ['thumbnail']
-            }
+                representation = {
+                    'name': 'thumbnail',
+                    'ext': 'jpg',
+                    'files': jpeg_file,
+                    "stagingDir": stagingdir,
+                    "thumbnail": True,
+                    "tags": ['thumbnail']
+                }

-            # adding representation
-            self.log.debug("Adding: {}".format(representation))
-            representations_new.append(representation)
+                # adding representation
+                self.log.debug("Adding: {}".format(representation))
+                representations_new.append(representation)

         instance.data["representations"] = representations_new
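The re-indentation above is the substance of this hunk: the old early return aborted process() for the whole instance as soon as one representation lacked the "review" tag, while the new guard only skips non-matching representations and keeps iterating. A stripped-down illustration of the control-flow change (make_thumbnail is a hypothetical stand-in for the ffmpeg call above):

def make_thumbnail(repre):
    # hypothetical stand-in for building and running the ffmpeg command
    print("thumbnail for", repre["name"])

def extract_old(representations):
    # old behaviour: bail out entirely at the first repre without a 'review' tag
    for repre in representations:
        if "review" not in repre["tags"]:
            return
        make_thumbnail(repre)

def extract_new(representations):
    # new behaviour: skip non-matching repres but keep processing the rest
    for repre in representations:
        if "review" in repre["tags"] or "thumb-nuke" in repre["tags"]:
            make_thumbnail(repre)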
@@ -176,8 +176,8 @@ class LoadGizmoInputProcess(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            msg = "Please create Viewer node before you "
-            "run this action again"
+            msg = str("Please create Viewer node before you "
+                      "run this action again")
             self.log.error(msg)
             nuke.message(msg)
             return None
@@ -276,8 +276,8 @@ class LoadLutsInputProcess(api.Loader):
         if len(viewer) > 0:
             viewer = viewer[0]
         else:
-            msg = "Please create Viewer node before you "
-            "run this action again"
+            msg = str("Please create Viewer node before you "
+                      "run this action again")
             self.log.error(msg)
             nuke.message(msg)
             return None
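The two hunks above fix the same bug: only the first string literal was assigned to msg, and the second line was a standalone expression that did nothing, so the logged message and the Nuke popup were truncated. Wrapping both parts in a single call (or in parentheses) makes Python's implicit concatenation of adjacent string literals apply. A minimal demonstration:

# broken: the second literal is its own statement, msg holds only the first part
msg = "Please create Viewer node before you "
"run this action again"
print(msg)      # -> "Please create Viewer node before you "

# fixed: adjacent literals inside one set of parentheses are concatenated
msg = ("Please create Viewer node before you "
       "run this action again")
print(msg)      # -> the full sentence

The commit uses str(...) rather than bare parentheses, which has the same effect.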