Merge branch '2.0/develop' into 2.0/PYPE-307-nks-basic-integration-pype2

This commit is contained in:
Jakub Jezek 2019-05-10 14:03:24 +02:00
commit 7237741033
88 changed files with 1741 additions and 599 deletions

View file

@ -26,15 +26,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
'render': 'render',
'nukescript': 'comp',
'review': 'mov'}
exclude = []
def process(self, instance):
for ex in self.exclude:
if ex in instance.data['families']:
return
self.log.debug('instance {}'.format(instance))
assumed_data = instance.data["assumedTemplateData"]
assumed_version = assumed_data["version"]
version_number = int(assumed_version)
@ -60,8 +54,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
self.log.debug('dest ext: ' + ext)
thumbnail = False
if ext in ['.mov']:
if not instance.data.get('startFrameReview'):
instance.data['startFrameReview'] = instance.data['startFrame']
@ -70,12 +62,13 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
location = ft_session.query(
'Location where name is "ftrack.server"').one()
component_data = {
"name": "ftrackreview-mp4", # Default component name is "main".
# Default component name is "main".
"name": "ftrackreview-mp4",
"metadata": {'ftr_meta': json.dumps({
'frameIn': int(instance.data['startFrameReview']),
'frameOut': int(instance.data['startFrameReview']),
'frameRate': 25})}
}
}
elif ext in [".jpg", ".jpeg"]:
component_data = {
"name": "thumbnail" # Default component name is "main".

View file

@ -0,0 +1,58 @@
import sys
import os
import subprocess
from avalon import api
def open(filepath):
    """Open file with system default executable.

    NOTE(review): this shadows the builtin ``open`` for the rest of the
    module — later ``open(path)`` calls launch the file externally
    instead of returning a file object.

    Args:
        filepath (str): Path of the file to hand to the OS launcher.
    """
    # macOS must be checked via sys.platform first: os.name is also
    # 'posix' on Darwin, so the xdg-open branch would otherwise win.
    if sys.platform.startswith('darwin'):
        subprocess.call(('open', filepath))
    elif os.name == 'nt':
        # Windows: delegate to the shell file association.
        os.startfile(filepath)
    elif os.name == 'posix':
        # Linux/BSD desktops: xdg-open resolves the default application.
        subprocess.call(('xdg-open', filepath))
class Openfile(api.Loader):
    """Open Image Sequence with system default"""

    # Loader registration data consumed by the Avalon loader UI.
    families = ["write"]
    representations = ["*"]

    label = "Open"
    order = -10
    icon = "play-circle"
    color = "orange"

    def load(self, context, name, namespace, data):
        """Resolve the representation to a file on disk and open it.

        Args:
            context (dict): Avalon load context;
                ``context["representation"]`` decides sequence vs.
                single-file handling.
            name (str): Subset name (unused, part of the Loader API).
            namespace (str): Namespace (unused, part of the Loader API).
            data (dict): Extra loader options (unused).
        """
        from avalon.vendor import clique

        directory = os.path.dirname(self.fname)
        pattern = clique.PATTERNS["frames"]
        files = os.listdir(directory)

        representation = context["representation"]
        # NOTE(review): the representation *name* is used as the
        # extension/tag here ("#" marks a frame sequence) — confirm
        # against the publisher that fills it in.
        ext = representation["name"]
        # (Removed an unused ``representation["data"]["path"]`` read:
        # the value was never used and could raise KeyError.)

        if ext in ["#"]:
            # Frame sequence: open the first frame of the first
            # assembled collection.
            collections, remainder = clique.assemble(files,
                                                     patterns=[pattern],
                                                     minimum_items=1)
            sequence = collections[0]
            first_image = list(sequence)[0]
            filepath = os.path.normpath(os.path.join(directory, first_image))
        else:
            # Single file: first entry matching the tag that is not a
            # sequence placeholder.
            file = [f for f in files
                    if ext in f
                    if "#" not in f][0]
            filepath = os.path.normpath(os.path.join(directory, file))

        self.log.info("Opening : {}".format(filepath))
        open(filepath)

View file

@ -1,49 +0,0 @@
import sys
import os
import subprocess
from avalon import api
def open(filepath):
    """Open file with system default executable.

    NOTE(review): shadows the builtin ``open`` in this module; the
    loader below relies on that when it calls ``open(filepath)``.

    Args:
        filepath (str): Path of the file to hand to the OS launcher.
    """
    # Darwin is checked first on purpose — os.name is 'posix' there too.
    if sys.platform.startswith('darwin'):
        subprocess.call(('open', filepath))
    elif os.name == 'nt':
        # Windows shell file-association launch.
        os.startfile(filepath)
    elif os.name == 'posix':
        # Generic POSIX desktop launcher.
        subprocess.call(('xdg-open', filepath))
class PlayImageSequence(api.Loader):
    """Open Image Sequence with system default"""

    # Loader registration data consumed by the Avalon loader UI.
    families = ["write"]
    representations = ["*"]

    label = "Play sequence"
    order = -10
    icon = "play-circle"
    color = "orange"

    def load(self, context, name, namespace, data):
        """Open the first frame of the published sequence externally."""
        # self.fname is the resolved representation *directory* here
        # (a folder of frames), not a single file path.
        directory = self.fname
        from avalon.vendor import clique

        pattern = clique.PATTERNS["frames"]
        files = os.listdir(directory)
        collections, remainder = clique.assemble(files,
                                                 patterns=[pattern],
                                                 minimum_items=1)

        # Every file must belong to a frame collection; stray files
        # abort the load.  NOTE(review): assert is stripped under -O.
        assert not remainder, ("There shouldn't have been a remainder for "
                               "'%s': %s" % (directory, remainder))

        seqeunce = collections[0]
        first_image = list(seqeunce)[0]
        filepath = os.path.normpath(os.path.join(directory, first_image))

        self.log.info("Opening : {}".format(filepath))
        # Calls the module-level launcher ``open``, not the builtin.
        open(filepath)

View file

@ -4,14 +4,18 @@ import pyblish.api
from avalon import io, api
class CollectAssumedDestination(pyblish.api.InstancePlugin):
class CollectAssumedDestination(pyblish.api.ContextPlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Collect Assumed Destination"
order = pyblish.api.CollectorOrder + 0.498
exclude_families = ["clip"]
def process(self, instance):
def process(self, context):
for instance in context:
self.process_item(instance)
def process_item(self, instance):
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return
@ -19,7 +23,6 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# self.log.info(anatomy.anatomy())

View file

@ -195,7 +195,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
template_data["frame"] = src_collection.format(
"{padding}") % i
anatomy_filled = anatomy.format(template_data)
test_dest_files.append(anatomy_filled.render.path)
test_dest_files.append(anatomy_filled["render"]["path"])
dst_collections, remainder = clique.assemble(test_dest_files)
dst_collection = dst_collections[0]
@ -223,7 +223,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
#
template_data.pop("frame", None)
anatomy.pop("frame", None)
fname = files
@ -239,15 +238,21 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled.render.path
dst = anatomy_filled["render"]["path"]
instance.data["transfers"].append([src, dst])
template_data["frame"] = "#" * anatomy.render.padding
if ext[1:] not in ["jpeg", "jpg", "mov", "mp4", "wav"]:
template_data["frame"] = "#" * int(anatomy_filled["render"]["padding"])
anatomy_filled = anatomy.format(template_data)
path_to_save = anatomy_filled.render.path
template = anatomy.render.fullpath
self.log.debug('ext[1:]: {}'.format(ext[1:]))
path_to_save = anatomy_filled["render"]["path"]
template = anatomy.templates["render"]["path"]
self.log.debug("path_to_save: {}".format(path_to_save))
representation = {
"schema": "pype:representation-2.0",

View file

@ -1,9 +1,4 @@
import os
import sys
import acre
from avalon import api, lib
from pype.tools import assetcreator
from pype.api import Logger
@ -19,9 +14,23 @@ class AssetCreator(api.Action):
def is_compatible(self, session):
"""Return whether the action is compatible with the session"""
if "AVALON_PROJECT" in session:
return True
return False
compatible = True
# Check required modules.
module_names = [
"ftrack_api", "ftrack_api_old", "pype.tools.assetcreator"
]
for name in module_names:
try:
__import__(name)
except ImportError:
compatible = False
# Check session environment.
if "AVALON_PROJECT" not in session:
compatible = False
return compatible
def process(self, session, **kwargs):
asset = ''

View file

@ -1,49 +0,0 @@
from avalon import api, io
from pype.api import Logger
try:
from pype.clockify import ClockifyAPI
except Exception:
pass
log = Logger().get_logger(__name__, "clockify_start")
class ClockifyStart(api.Action):
    """Launcher action that starts a Clockify time entry for the session task."""

    name = "clockify_start_timer"
    label = "Clockify - Start Timer"
    icon = "clockify_icon"
    order = 500

    # Build the API client once at class-definition time.  Any failure —
    # including ClockifyAPI never having been imported at module top —
    # leaves clockapi as None and is_compatible() reports the action as
    # unavailable.  This replaces the previous
    # exec("try: ... \nexcept: ...") workaround: a try/except statement
    # is perfectly valid directly in a class body, and catching
    # Exception (instead of a bare except) avoids swallowing
    # KeyboardInterrupt/SystemExit.
    try:
        clockapi = ClockifyAPI()
    except Exception:
        clockapi = None

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        if self.clockapi is None:
            return False
        if "AVALON_TASK" in session:
            return True
        return False

    def process(self, session, **kwargs):
        """Start a Clockify time entry for the session's project/asset/task.

        The entry description is the asset's parent path joined with the
        asset and task names (falls back to the bare asset name when the
        asset document is not found).
        """
        project_name = session['AVALON_PROJECT']
        asset_name = session['AVALON_ASSET']
        task_name = session['AVALON_TASK']
        description = asset_name

        asset = io.find_one({
            'type': 'asset',
            'name': asset_name
        })
        if asset is not None:
            desc_items = asset.get('data', {}).get('parents', [])
            desc_items.append(asset_name)
            desc_items.append(task_name)
            description = '/'.join(desc_items)

        project_id = self.clockapi.get_project_id(project_name)
        tag_ids = []
        # Tag the entry with the task type so reports can group by task.
        tag_ids.append(self.clockapi.get_tag_id(task_name))
        self.clockapi.start_time_entry(
            description, project_id, tag_ids=tag_ids
        )

View file

@ -1,65 +0,0 @@
from avalon import api, io
try:
from pype.clockify import ClockifyAPI
except Exception:
pass
from pype.api import Logger
log = Logger().get_logger(__name__, "clockify_sync")
class ClockifySync(api.Action):
name = "sync_to_clockify"
label = "Sync to Clockify"
icon = "clockify_white_icon"
order = 500
exec(
"try:\n\tclockapi = ClockifyAPI()"
"\n\thave_permissions = clockapi.validate_workspace_perm()"
"\nexcept:\n\tclockapi = None"
)
    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        # clockapi is None when the ClockifyAPI client failed to
        # construct at class-definition time.
        if self.clockapi is None:
            return False
        # Syncing projects/tags needs workspace permissions, validated
        # once when the class attributes were initialised.
        return self.have_permissions
def process(self, session, **kwargs):
project_name = session.get('AVALON_PROJECT', None)
projects_to_sync = []
if project_name.strip() == '' or project_name is None:
for project in io.projects():
projects_to_sync.append(project)
else:
project = io.find_one({'type': 'project'})
projects_to_sync.append(project)
projects_info = {}
for project in projects_to_sync:
task_types = [task['name'] for task in project['config']['tasks']]
projects_info[project['name']] = task_types
clockify_projects = self.clockapi.get_projects()
for project_name, task_types in projects_info.items():
if project_name not in clockify_projects:
response = self.clockapi.add_project(project_name)
if 'id' not in response:
self.log.error('Project {} can\'t be created'.format(
project_name
))
continue
project_id = response['id']
else:
project_id = clockify_projects[project_name]
clockify_workspace_tags = self.clockapi.get_tags()
for task_type in task_types:
if task_type not in clockify_workspace_tags:
response = self.clockapi.add_tag(task_type)
if 'id' not in response:
self.log.error('Task {} can\'t be created'.format(
task_type
))
continue

View file

@ -35,7 +35,7 @@ class CrateWriteRender(avalon.nuke.Creator):
data = OrderedDict()
data["family"] = self.family
data["family"] = self.family.split("_")[-1]
data["families"] = self.families
{data.update({k: v}) for k, v in self.data.items()
@ -103,48 +103,51 @@ class CrateWritePrerender(avalon.nuke.Creator):
create_write_node(self.data["subset"], write_data)
return
class CrateWriteStill(avalon.nuke.Creator):
    """Creator for a single-frame ("still") Write node in Nuke.

    NOTE(review): "Crate" (here and in the sibling creators) looks like
    a typo for "Create"; kept because the class name participates in
    Avalon's creator discovery.
    """

    # change this to template preset
    preset = "still"

    name = "WriteStill"
    label = "Create Write Still"
    hosts = ["nuke"]
    # Family is "<preset>_write"; families carries the bare preset.
    family = "{}_write".format(preset)
    families = preset
    icon = "image"

    def __init__(self, *args, **kwargs):
        super(CrateWriteStill, self).__init__(*args, **kwargs)

        data = OrderedDict()

        data["family"] = self.family
        data["families"] = self.families

        # Merge inherited creator data without overwriting the keys set
        # above (set comprehension used purely for its update side
        # effect).
        {data.update({k: v}) for k, v in self.data.items()
         if k not in data.keys()}
        self.data = data

    def process(self):
        """Create a FrameHold + Write node pair for the current frame."""
        self.name = self.data["subset"]

        instance = nuke.toNode(self.data["subset"])

        family = self.family
        node = 'write'

        if not instance:
            # Lock rendering to the frame the user is parked on.
            write_data = {
                "frame_range": [nuke.frame(), nuke.frame()],
                "class": node,
                "preset": self.preset,
                "avalon": self.data
            }

            nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
            create_write_node(self.data["subset"], write_data)

        return
#
#
# class CrateWriteStill(avalon.nuke.Creator):
# # change this to template preset
# preset = "still"
#
# name = "WriteStill"
# label = "Create Write Still"
# hosts = ["nuke"]
# family = "{}_write".format(preset)
# families = preset
# icon = "image"
#
# def __init__(self, *args, **kwargs):
# super(CrateWriteStill, self).__init__(*args, **kwargs)
#
# data = OrderedDict()
#
# data["family"] = self.family.split("_")[-1]
# data["families"] = self.families
#
# {data.update({k: v}) for k, v in self.data.items()
# if k not in data.keys()}
# self.data = data
#
# def process(self):
# self.name = self.data["subset"]
#
# node_name = self.data["subset"].replace(
# "_", "_f{}_".format(nuke.frame()))
# instance = nuke.toNode(self.data["subset"])
# self.data["subset"] = node_name
#
# family = self.family
# node = 'write'
#
# if not instance:
# write_data = {
# "frame_range": [nuke.frame(), nuke.frame()],
# "class": node,
# "preset": self.preset,
# "avalon": self.data
# }
#
# nuke.createNode("FrameHold", "first_frame {}".format(nuke.frame()))
# create_write_node(node_name, write_data)
#
# return

View file

@ -8,7 +8,7 @@ import avalon.io as io
import nuke
from pype.api import Logger
log = Logger.get_looger(__name__, "nuke")
log = Logger().get_logger(__name__, "nuke")
@contextlib.contextmanager
@ -88,8 +88,6 @@ class LoadSequence(api.Loader):
containerise,
viewer_update_and_undo_stop
)
# for k, v in context.items():
# log.info("key: `{}`, value: {}\n".format(k, v))
version = context['version']
version_data = version.get("data", {})
@ -137,12 +135,14 @@ class LoadSequence(api.Loader):
data_imprint.update({k: context["version"]['data'][k]})
data_imprint.update({"objectName": read_name})
r["tile_color"].setValue(int("0x4ecd25ff", 16))
return containerise(r,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__,
data=data_imprint)
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__,
data=data_imprint)
def switch(self, container, representation):
self.update(container, representation)
@ -150,18 +150,17 @@ class LoadSequence(api.Loader):
def update(self, container, representation):
"""Update the Loader's path
Fusion automatically tries to reset some variables when changing
Nuke automatically tries to reset some variables when changing
the loader's path to a new file. These automatic changes are to its
inputs:
"""
from avalon.nuke import (
viewer_update_and_undo_stop,
ls_img_sequence,
update_container
)
log.info("this i can see")
node = nuke.toNode(container['objectName'])
# TODO: prepare also for other Read img/geo/camera
assert node.Class() == "Read", "Must be Read"
@ -170,8 +169,19 @@ class LoadSequence(api.Loader):
file = ls_img_sequence(os.path.dirname(root), one=True)
# Get start frame from version data
version = io.find_one({"type": "version",
"_id": representation["parent"]})
version = io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get all versions in list
versions = io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
max_version = max(versions)
start = version["data"].get("startFrame")
if start is None:
log.warning("Missing start frame for updated version"
@ -179,24 +189,44 @@ class LoadSequence(api.Loader):
"{} ({})".format(node['name'].value(), representation))
start = 0
with viewer_update_and_undo_stop():
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file["path"])
log.info("__ node['file']: {}".format(node["file"]))
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file["path"])
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)
if global_in_changed:
# Log this change to the user
log.debug("Changed '{}' global in:"
" {:d}".format(node['name'].value(), start))
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(node, start, relative=False)
if global_in_changed:
# Log this change to the user
log.debug("Changed '{}' global in:"
" {:d}".format(node['name'].value(), start))
updated_dict = {}
updated_dict.update({
"representation": str(representation["_id"]),
"startFrame": start,
"endFrame": version["data"].get("endFrame"),
"version": version.get("name"),
"colorspace": version["data"].get("colorspace"),
"source": version["data"].get("source"),
"handles": version["data"].get("handles"),
"fps": version["data"].get("fps"),
"author": version["data"].get("author"),
"outputDir": version["data"].get("outputDir"),
})
# Update the imprinted representation
update_container(
node,
{"representation": str(representation["_id"])}
)
# change color of node
if version.get("name") not in [max_version]:
node["tile_color"].setValue(int("0xd84f20ff", 16))
else:
node["tile_color"].setValue(int("0x4ecd25ff", 16))
# Update the imprinted representation
update_container(
node,
updated_dict
)
log.info("udated to version: {}".format(version.get("name")))
def remove(self, container):

View file

@ -18,7 +18,7 @@ class CollectInstanceFamilies(pyblish.api.InstancePlugin):
families = []
if instance.data.get('families'):
families.append(instance.data['families'])
families += instance.data['families']
# set for ftrack to accept
# instance.data["families"] = ["ftrack"]
@ -36,10 +36,8 @@ class CollectInstanceFamilies(pyblish.api.InstancePlugin):
families.append('ftrack')
instance.data["families"] = families
# Sort/grouped by family (preserving local index)
instance.context[:] = sorted(instance.context, key=self.sort_by_family)

View file

@ -1,13 +1,9 @@
import os
import tempfile
import nuke
import pyblish.api
import logging
import pype.api as pype
log = logging.get_logger(__name__)
@pyblish.api.log
class CollectNukeWrites(pyblish.api.ContextPlugin):
@ -65,9 +61,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
int(last_frame)
)
# preredered frames
# collect frames by try
# collect families in next file
if "files" not in instance.data:
instance.data["files"] = list()
try:
@ -89,8 +82,6 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
"colorspace": node["colorspace"].value(),
})
self.log.debug("instance.data: {}".format(instance.data))
self.log.debug("context: {}".format(context))

View file

@ -17,18 +17,11 @@ class NukeRenderLocal(pype.api.Extractor):
order = pyblish.api.ExtractorOrder
label = "Render Local"
hosts = ["nuke"]
families = ["render.local", "prerender.local", "still.local"]
families = ["render.local"]
def process(self, instance):
node = instance[0]
# This should be a ContextPlugin, but this is a workaround
# for a bug in pyblish to run once for a family: issue #250
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True
self.log.debug("instance collected: {}".format(instance.data))
@ -70,8 +63,9 @@ class NukeRenderLocal(pype.api.Extractor):
collections, remainder = clique.assemble(*instance.data['files'])
self.log.info('collections: {}'.format(str(collections)))
collection = collections[0]
instance.data['collection'] = collection
if collections:
collection = collections[0]
instance.data['collection'] = collection
self.log.info('Finished render')
return

View file

@ -28,21 +28,29 @@ class ExtractDataForReview(pype.api.Extractor):
self.log.debug("creating staging dir:")
self.staging_dir(instance)
self.render_review_representation(instance,
representation="mov")
self.log.debug("review mov:")
self.transcode_mov(instance)
self.render_review_representation(instance,
representation="jpeg")
self.log.debug("instance: {}".format(instance))
self.log.debug("instance.data[families]: {}".format(
instance.data["families"]))
if "still" not in instance.data["families"]:
self.render_review_representation(instance,
representation="mov")
self.log.debug("review mov:")
self.transcode_mov(instance)
self.log.debug("instance.data: {}".format(instance.data))
self.render_review_representation(instance,
representation="jpeg")
else:
self.log.debug("instance: {}".format(instance))
self.render_review_representation(instance, representation="jpeg")
# Restore selection
[i["selected"].setValue(False) for i in nuke.allNodes()]
[i["selected"].setValue(True) for i in selection]
def transcode_mov(self, instance):
import subprocess
collection = instance.data["collection"]
staging_dir = instance.data["stagingDir"]
staging_dir = instance.data["stagingDir"].replace("\\", "/")
file_name = collection.format("{head}mov")
review_mov = os.path.join(staging_dir, file_name).replace("\\", "/")
@ -53,13 +61,16 @@ class ExtractDataForReview(pype.api.Extractor):
out, err = (
ffmpeg
.input(input_movie)
.output(review_mov, pix_fmt='yuv420p', crf=18, timecode="00:00:00:01")
.output(
review_mov,
pix_fmt='yuv420p',
crf=18,
timecode="00:00:00:01"
)
.overwrite_output()
.run()
)
self.log.debug("Removing `{0}`...".format(
instance.data["baked_colorspace_movie"]))
os.remove(instance.data["baked_colorspace_movie"])
@ -72,23 +83,32 @@ class ExtractDataForReview(pype.api.Extractor):
assert instance.data['files'], "Instance data files should't be empty!"
import clique
import nuke
temporary_nodes = []
staging_dir = instance.data["stagingDir"]
staging_dir = instance.data["stagingDir"].replace("\\", "/")
self.log.debug("StagingDir `{0}`...".format(staging_dir))
collection = instance.data.get("collection", None)
# Create nodes
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
if collection:
# get path
fname = os.path.basename(collection.format(
"{head}{padding}{tail}"))
fhead = collection.format("{head}")
# get first and last frame
first_frame = min(collection.indexes)
last_frame = max(collection.indexes)
else:
fname = os.path.basename(instance.data.get("path", None))
fhead = os.path.splitext(fname)[0] + "."
first_frame = instance.data.get("startFrame", None)
last_frame = instance.data.get("endFrame", None)
node = previous_node = nuke.createNode("Read")
node["file"].setValue(
os.path.join(staging_dir,
os.path.basename(collection.format(
"{head}{padding}{tail}"))).replace("\\", "/"))
os.path.join(staging_dir, fname).replace("\\", "/"))
node["first"].setValue(first_frame)
node["origfirst"].setValue(first_frame)
@ -126,7 +146,7 @@ class ExtractDataForReview(pype.api.Extractor):
write_node = nuke.createNode("Write")
if representation in "mov":
file = collection.format("{head}baked.mov")
file = fhead + "baked.mov"
path = os.path.join(staging_dir, file).replace("\\", "/")
self.log.debug("Path: {}".format(path))
instance.data["baked_colorspace_movie"] = path
@ -137,7 +157,7 @@ class ExtractDataForReview(pype.api.Extractor):
temporary_nodes.append(write_node)
elif representation in "jpeg":
file = collection.format("{head}jpeg")
file = fhead + "jpeg"
path = os.path.join(staging_dir, file).replace("\\", "/")
instance.data["thumbnail"] = path
write_node["file"].setValue(path)
@ -147,8 +167,8 @@ class ExtractDataForReview(pype.api.Extractor):
temporary_nodes.append(write_node)
# retime for
first_frame = int(last_frame)/2
last_frame = int(last_frame)/2
first_frame = int(last_frame) / 2
last_frame = int(last_frame) / 2
# add into files for integration as representation
instance.data["files"].append(file)

View file

@ -28,7 +28,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
]
# Value of these attributes can be found on parents
hierarchical_attributes = ["fps"]
hierarchical_attributes = ["fps", "resolution_width", "resolution_height", "pixel_aspect"]
missing_attributes = []
asset_attributes = {}
@ -80,6 +80,7 @@ class ValidateScript(pyblish.api.InstancePlugin):
# Compare asset's values Nukescript X Database
not_matching = []
for attr in attributes:
self.log.debug("asset vs script attribute: {0}, {1}".format(asset_attributes[attr], script_attributes[attr]))
if asset_attributes[attr] != script_attributes[attr]:
not_matching.append(attr)