Merge branch 'develop' into feature/OP-4245Data_Exchange_Geometry

This commit is contained in:
Kayla Man 2023-03-06 14:25:30 +08:00
commit 18ae73de8a
21 changed files with 604 additions and 333 deletions

View file

@ -120,13 +120,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
track = sitem.parentTrack().name()
# node serialization
node = sitem.node()
node_serialized = self.node_serialisation(node)
node_serialized = self.node_serialization(node)
node_name = sitem.name()
if "_" in node_name:
node_class = re.sub(r"(?:_)[_0-9]+", "", node_name) # more numbers
else:
node_class = re.sub(r"\d+", "", node_name) # one number
node_class = node.Class()
# collect timelineIn/Out
effect_t_in = int(sitem.timelineIn())
@ -148,7 +144,7 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
"node": node_serialized
}}
def node_serialisation(self, node):
def node_serialization(self, node):
node_serialized = {}
# adding ignoring knob keys

View file

@ -2,9 +2,11 @@
"""Validate model nodes names."""
import os
import re
from maya import cmds
import pyblish.api
import platform
from maya import cmds
import pyblish.api
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import ValidateContentsOrder
import openpype.hosts.maya.api.action
@ -44,7 +46,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
if not cmds.ls(child, transforms=True):
return False
return True
except:
except Exception:
return False
invalid = []
@ -94,9 +96,10 @@ class ValidateModelName(pyblish.api.InstancePlugin):
# load shader list file as utf-8
shaders = []
if not use_db:
if cls.material_file:
if os.path.isfile(cls.material_file):
shader_file = open(cls.material_file, "r")
material_file = cls.material_file[platform.system().lower()]
if material_file:
if os.path.isfile(material_file):
shader_file = open(material_file, "r")
shaders = shader_file.readlines()
shader_file.close()
else:
@ -113,7 +116,7 @@ class ValidateModelName(pyblish.api.InstancePlugin):
shader_file.close()
# strip line endings from list
shaders = map(lambda s: s.rstrip(), shaders)
shaders = [s.rstrip() for s in shaders if s.rstrip()]
# compile regex for testing names
regex = cls.regex

View file

@ -1264,7 +1264,7 @@ def convert_to_valid_instaces():
creator_attr["farm_chunk"] = (
node["deadlineChunkSize"].value())
if "deadlineConcurrentTasks" in node.knobs():
creator_attr["farm_concurency"] = (
creator_attr["farm_concurrency"] = (
node["deadlineConcurrentTasks"].value())
_remove_old_knobs(node)

View file

@ -6,10 +6,7 @@ from openpype.pipeline import (
CreatedInstance
)
from openpype.lib import (
BoolDef,
NumberDef,
UISeparatorDef,
UILabelDef
BoolDef
)
from openpype.hosts.nuke import api as napi
@ -49,33 +46,6 @@ class CreateWritePrerender(napi.NukeWriteCreator):
self._get_render_target_enum(),
self._get_reviewable_bool()
]
if "farm_rendering" in self.instance_attributes:
attr_defs.extend([
UISeparatorDef(),
UILabelDef("Farm rendering attributes"),
BoolDef("suspended_publish", label="Suspended publishing"),
NumberDef(
"farm_priority",
label="Priority",
minimum=1,
maximum=99,
default=50
),
NumberDef(
"farm_chunk",
label="Chunk size",
minimum=1,
maximum=99,
default=10
),
NumberDef(
"farm_concurency",
label="Concurent tasks",
minimum=1,
maximum=10,
default=1
)
])
return attr_defs
def create_instance_node(self, subset_name, instance_data):

View file

@ -6,10 +6,7 @@ from openpype.pipeline import (
CreatedInstance
)
from openpype.lib import (
BoolDef,
NumberDef,
UISeparatorDef,
UILabelDef
BoolDef
)
from openpype.hosts.nuke import api as napi
@ -46,33 +43,6 @@ class CreateWriteRender(napi.NukeWriteCreator):
self._get_render_target_enum(),
self._get_reviewable_bool()
]
if "farm_rendering" in self.instance_attributes:
attr_defs.extend([
UISeparatorDef(),
UILabelDef("Farm rendering attributes"),
BoolDef("suspended_publish", label="Suspended publishing"),
NumberDef(
"farm_priority",
label="Priority",
minimum=1,
maximum=99,
default=50
),
NumberDef(
"farm_chunk",
label="Chunk size",
minimum=1,
maximum=99,
default=10
),
NumberDef(
"farm_concurency",
label="Concurent tasks",
minimum=1,
maximum=10,
default=1
)
])
return attr_defs
def create_instance_node(self, subset_name, instance_data):

View file

@ -132,14 +132,14 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
self.log.info("Publishing rendered frames ...")
elif render_target == "farm":
farm_priority = creator_attributes.get("farm_priority")
farm_chunk = creator_attributes.get("farm_chunk")
farm_concurency = creator_attributes.get("farm_concurency")
instance.data.update({
"deadlineChunkSize": farm_chunk or 1,
"deadlinePriority": farm_priority or 50,
"deadlineConcurrentTasks": farm_concurency or 0
})
farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"]
for key in farm_keys:
# Skip if key is not in creator attributes
if key not in creator_attributes:
continue
# Add farm attributes to instance
instance.data[key] = creator_attributes[key]
# Farm rendering
instance.data["transfer"] = False
instance.data["farm"] = True

View file

@ -9,11 +9,19 @@ import pyblish.api
import nuke
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import (
OpenPypePyblishPluginMixin
)
from openpype.tests.lib import is_in_tests
from openpype.lib import is_running_from_build
from openpype.lib import (
is_running_from_build,
BoolDef,
NumberDef
)
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
class NukeSubmitDeadline(pyblish.api.InstancePlugin,
OpenPypePyblishPluginMixin):
"""Submit write to Deadline
Renders are submitted to a Deadline Web Service as
@ -21,10 +29,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"""
label = "Submit to Deadline"
label = "Submit Nuke to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["nuke", "nukestudio"]
families = ["render.farm", "prerender.farm"]
hosts = ["nuke"]
families = ["render", "prerender.farm"]
optional = True
targets = ["local"]
@ -39,7 +47,42 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
env_allowed_keys = []
env_search_replace_values = {}
@classmethod
def get_attribute_defs(cls):
return [
NumberDef(
"priority",
label="Priority",
default=cls.priority,
decimals=0
),
NumberDef(
"chunk",
label="Frames Per Task",
default=cls.chunk_size,
decimals=0,
minimum=1,
maximum=1000
),
NumberDef(
"concurrency",
label="Concurrency",
default=cls.concurrent_tasks,
decimals=0,
minimum=1,
maximum=10
),
BoolDef(
"use_gpu",
default=cls.use_gpu,
label="Use GPU"
)
]
def process(self, instance):
instance.data["attributeValues"] = self.get_attr_values_from_data(
instance.data)
instance.data["toBeRenderedOn"] = "deadline"
families = instance.data["families"]
@ -141,7 +184,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
exe_node_name,
start_frame,
end_frame,
responce_data=None
response_data=None
):
render_dir = os.path.normpath(os.path.dirname(render_path))
batch_name = os.path.basename(script_path)
@ -152,8 +195,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
output_filename_0 = self.preview_fname(render_path)
if not responce_data:
responce_data = {}
if not response_data:
response_data = {}
try:
# Ensure render folder exists
@ -161,20 +204,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
except OSError:
pass
# define chunk and priority
chunk_size = instance.data["deadlineChunkSize"]
if chunk_size == 0 and self.chunk_size:
chunk_size = self.chunk_size
# define chunk and priority
concurrent_tasks = instance.data["deadlineConcurrentTasks"]
if concurrent_tasks == 0 and self.concurrent_tasks:
concurrent_tasks = self.concurrent_tasks
priority = instance.data["deadlinePriority"]
if not priority:
priority = self.priority
# resolve any limit groups
limit_groups = self.get_limit_groups()
self.log.info("Limit groups: `{}`".format(limit_groups))
@ -193,9 +222,14 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# Arbitrary username, for visualisation in Monitor
"UserName": self._deadline_user,
"Priority": priority,
"ChunkSize": chunk_size,
"ConcurrentTasks": concurrent_tasks,
"Priority": instance.data["attributeValues"].get(
"priority", self.priority),
"ChunkSize": instance.data["attributeValues"].get(
"chunk", self.chunk_size),
"ConcurrentTasks": instance.data["attributeValues"].get(
"concurrency",
self.concurrent_tasks
),
"Department": self.department,
@ -234,7 +268,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"AWSAssetFile0": render_path,
# using GPU by default
"UseGpu": self.use_gpu,
"UseGpu": instance.data["attributeValues"].get(
"use_gpu", self.use_gpu),
# Only the specific write node is rendered.
"WriteNode": exe_node_name
@ -244,11 +279,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"AuxFiles": []
}
if responce_data.get("_id"):
if response_data.get("_id"):
payload["JobInfo"].update({
"JobType": "Normal",
"BatchName": responce_data["Props"]["Batch"],
"JobDependency0": responce_data["_id"],
"BatchName": response_data["Props"]["Batch"],
"JobDependency0": response_data["_id"],
"ChunkSize": 99999999
})

View file

@ -23,36 +23,37 @@ class ShowInKitsu(LauncherAction):
return True
def process(self, session, **kwargs):
# Context inputs
project_name = session["AVALON_PROJECT"]
asset_name = session.get("AVALON_ASSET", None)
task_name = session.get("AVALON_TASK", None)
project = get_project(project_name=project_name,
fields=["data.zou_id"])
project = get_project(
project_name=project_name, fields=["data.zou_id"]
)
if not project:
raise RuntimeError(f"Project {project_name} not found.")
raise RuntimeError("Project {} not found.".format(project_name))
project_zou_id = project["data"].get("zou_id")
if not project_zou_id:
raise RuntimeError(f"Project {project_name} has no "
f"connected kitsu id.")
raise RuntimeError(
"Project {} has no connected kitsu id.".format(project_name)
)
asset_zou_name = None
asset_zou_id = None
asset_zou_type = 'Assets'
asset_zou_type = "Assets"
task_zou_id = None
zou_sub_type = ['AssetType', 'Sequence']
zou_sub_type = ["AssetType", "Sequence"]
if asset_name:
asset_zou_name = asset_name
asset_fields = ["data.zou.id", "data.zou.type"]
if task_name:
asset_fields.append(f"data.tasks.{task_name}.zou.id")
asset_fields.append("data.tasks.{}.zou.id".format(task_name))
asset = get_asset_by_name(project_name,
asset_name=asset_name,
fields=asset_fields)
asset = get_asset_by_name(
project_name, asset_name=asset_name, fields=asset_fields
)
asset_zou_data = asset["data"].get("zou")
@ -67,40 +68,47 @@ class ShowInKitsu(LauncherAction):
task_data = asset["data"]["tasks"][task_name]
task_zou_data = task_data.get("zou", {})
if not task_zou_data:
self.log.debug(f"No zou task data for task: {task_name}")
self.log.debug(
"No zou task data for task: {}".format(task_name)
)
task_zou_id = task_zou_data["id"]
# Define URL
url = self.get_url(project_id=project_zou_id,
asset_name=asset_zou_name,
asset_id=asset_zou_id,
asset_type=asset_zou_type,
task_id=task_zou_id)
url = self.get_url(
project_id=project_zou_id,
asset_name=asset_zou_name,
asset_id=asset_zou_id,
asset_type=asset_zou_type,
task_id=task_zou_id,
)
# Open URL in webbrowser
self.log.info(f"Opening URL: {url}")
webbrowser.open(url,
# Try in new tab
new=2)
self.log.info("Opening URL: {}".format(url))
webbrowser.open(
url,
# Try in new tab
new=2,
)
def get_url(self,
project_id,
asset_name=None,
asset_id=None,
asset_type=None,
task_id=None):
shots_url = {'Shots', 'Sequence', 'Shot'}
sub_type = {'AssetType', 'Sequence'}
def get_url(
self,
project_id,
asset_name=None,
asset_id=None,
asset_type=None,
task_id=None,
):
shots_url = {"Shots", "Sequence", "Shot"}
sub_type = {"AssetType", "Sequence"}
kitsu_module = self.get_kitsu_module()
# Get kitsu url with /api stripped
kitsu_url = kitsu_module.server_url
if kitsu_url.endswith("/api"):
kitsu_url = kitsu_url[:-len("/api")]
kitsu_url = kitsu_url[: -len("/api")]
sub_url = f"/productions/{project_id}"
asset_type_url = "Shots" if asset_type in shots_url else "Assets"
asset_type_url = "shots" if asset_type in shots_url else "assets"
if task_id:
# Go to task page
@ -120,6 +128,6 @@ class ShowInKitsu(LauncherAction):
# Add search method if is a sub_type
sub_url += f"/{asset_type_url}"
if asset_type in sub_type:
sub_url += f'?search={asset_name}'
sub_url += f"?search={asset_name}"
return f"{kitsu_url}{sub_url}"

View file

@ -13,6 +13,5 @@ class CollectKitsuSession(pyblish.api.ContextPlugin): # rename log in
# families = ["kitsu"]
def process(self, context):
gazu.client.set_host(os.environ["KITSU_SERVER"])
gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"])

View file

@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
import os
import gazu
import pyblish.api
@ -12,62 +10,69 @@ class CollectKitsuEntities(pyblish.api.ContextPlugin):
label = "Kitsu entities"
def process(self, context):
asset_data = context.data["assetEntity"]["data"]
zou_asset_data = asset_data.get("zou")
if not zou_asset_data:
raise AssertionError("Zou asset data not found in OpenPype!")
self.log.debug("Collected zou asset data: {}".format(zou_asset_data))
zou_task_data = asset_data["tasks"][os.environ["AVALON_TASK"]].get(
"zou"
kitsu_project = gazu.project.get_project_by_name(
context.data["projectName"]
)
if not zou_task_data:
self.log.warning("Zou task data not found in OpenPype!")
self.log.debug("Collected zou task data: {}".format(zou_task_data))
kitsu_project = gazu.project.get_project(zou_asset_data["project_id"])
if not kitsu_project:
raise AssertionError("Project not found in kitsu!")
raise ValueError("Project not found in kitsu!")
context.data["kitsu_project"] = kitsu_project
self.log.debug("Collect kitsu project: {}".format(kitsu_project))
entity_type = zou_asset_data["type"]
if entity_type == "Shot":
kitsu_entity = gazu.shot.get_shot(zou_asset_data["id"])
else:
kitsu_entity = gazu.asset.get_asset(zou_asset_data["id"])
kitsu_entities_by_id = {}
for instance in context:
asset_doc = instance.data.get("assetEntity")
if not asset_doc:
continue
if not kitsu_entity:
raise AssertionError("{} not found in kitsu!".format(entity_type))
zou_asset_data = asset_doc["data"].get("zou")
if not zou_asset_data:
raise ValueError("Zou asset data not found in OpenPype!")
context.data["kitsu_entity"] = kitsu_entity
self.log.debug(
"Collect kitsu {}: {}".format(entity_type, kitsu_entity)
)
task_name = instance.data.get("task")
if not task_name:
continue
if zou_task_data:
kitsu_task = gazu.task.get_task(zou_task_data["id"])
if not kitsu_task:
raise AssertionError("Task not found in kitsu!")
context.data["kitsu_task"] = kitsu_task
self.log.debug("Collect kitsu task: {}".format(kitsu_task))
else:
kitsu_task_type = gazu.task.get_task_type_by_name(
os.environ["AVALON_TASK"]
zou_task_data = asset_doc["data"]["tasks"][task_name].get("zou")
self.log.debug(
"Collected zou task data: {}".format(zou_task_data)
)
if not kitsu_task_type:
raise AssertionError(
"Task type {} not found in Kitsu!".format(
os.environ["AVALON_TASK"]
entity_id = zou_asset_data["id"]
entity = kitsu_entities_by_id.get(entity_id)
if not entity:
entity = gazu.entity.get_entity(entity_id)
if not entity:
raise ValueError(
"{} was not found in kitsu!".format(
zou_asset_data["name"]
)
)
kitsu_entities_by_id[entity_id] = entity
instance.data["entity"] = entity
self.log.debug(
"Collect kitsu {}: {}".format(zou_asset_data["type"], entity)
)
if zou_task_data:
kitsu_task_id = zou_task_data["id"]
kitsu_task = kitsu_entities_by_id.get(kitsu_task_id)
if not kitsu_task:
kitsu_task = gazu.task.get_task(zou_task_data["id"])
kitsu_entities_by_id[kitsu_task_id] = kitsu_task
else:
kitsu_task_type = gazu.task.get_task_type_by_name(task_name)
if not kitsu_task_type:
raise ValueError(
"Task type {} not found in Kitsu!".format(task_name)
)
kitsu_task = gazu.task.get_task_by_name(
entity, kitsu_task_type
)
kitsu_task = gazu.task.get_task_by_name(
kitsu_entity, kitsu_task_type
)
if not kitsu_task:
raise AssertionError("Task not found in kitsu!")
context.data["kitsu_task"] = kitsu_task
raise ValueError("Task not found in kitsu!")
instance.data["kitsu_task"] = kitsu_task
self.log.debug("Collect kitsu task: {}".format(kitsu_task))

View file

@ -8,12 +8,11 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):
order = pyblish.api.IntegratorOrder
label = "Kitsu Note and Status"
# families = ["kitsu"]
families = ["render", "kitsu"]
set_status_note = False
note_status_shortname = "wfa"
def process(self, context):
# Get comment text body
publish_comment = context.data.get("comment")
if not publish_comment:
@ -21,30 +20,33 @@ class IntegrateKitsuNote(pyblish.api.ContextPlugin):
self.log.debug("Comment is `{}`".format(publish_comment))
# Get note status, by default uses the task status for the note
# if it is not specified in the configuration
note_status = context.data["kitsu_task"]["task_status_id"]
if self.set_status_note:
kitsu_status = gazu.task.get_task_status_by_short_name(
self.note_status_shortname
)
if kitsu_status:
note_status = kitsu_status
self.log.info("Note Kitsu status: {}".format(note_status))
else:
self.log.info(
"Cannot find {} status. The status will not be "
"changed!".format(self.note_status_shortname)
for instance in context:
kitsu_task = instance.data.get("kitsu_task")
if kitsu_task is None:
continue
# Get note status, by default uses the task status for the note
# if it is not specified in the configuration
note_status = kitsu_task["task_status"]["id"]
if self.set_status_note:
kitsu_status = gazu.task.get_task_status_by_short_name(
self.note_status_shortname
)
if kitsu_status:
note_status = kitsu_status
self.log.info("Note Kitsu status: {}".format(note_status))
else:
self.log.info(
"Cannot find {} status. The status will not be "
"changed!".format(self.note_status_shortname)
)
# Add comment to kitsu task
self.log.debug(
"Add new note in taks id {}".format(
context.data["kitsu_task"]["id"]
# Add comment to kitsu task
task_id = kitsu_task["id"]
self.log.debug("Add new note in task id {}".format(task_id))
kitsu_comment = gazu.task.add_comment(
task_id, note_status, comment=publish_comment
)
)
kitsu_comment = gazu.task.add_comment(
context.data["kitsu_task"], note_status, comment=publish_comment
)
context.data["kitsu_comment"] = kitsu_comment
instance.data["kitsu_comment"] = kitsu_comment

View file

@ -8,14 +8,12 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
order = pyblish.api.IntegratorOrder + 0.01
label = "Kitsu Review"
# families = ["kitsu"]
families = ["render", "kitsu"]
optional = True
def process(self, instance):
context = instance.context
task = context.data["kitsu_task"]
comment = context.data.get("kitsu_comment")
task = instance.data["kitsu_task"]["id"]
comment = instance.data["kitsu_comment"]["id"]
# Check comment has been created
if not comment:
@ -27,9 +25,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin):
# Add review representations as preview of comment
for representation in instance.data.get("representations", []):
# Skip if not tagged as review
if "review" not in representation.get("tags", []):
if "kitsureview" not in representation.get("tags", []):
continue
review_path = representation.get("published_path")
self.log.debug("Found review at: {}".format(review_path))

View file

@ -54,7 +54,8 @@ def validate_host(kitsu_url: str) -> bool:
if gazu.client.host_is_valid():
return True
else:
raise gazu.exception.HostException(f"Host '{kitsu_url}' is invalid.")
raise gazu.exception.HostException(
"Host '{}' is invalid.".format(kitsu_url))
def clear_credentials():

View file

@ -1,3 +1,15 @@
"""
Bugs:
* Error when adding task type to anything that isn't Shot or Assets
* Assets don't get added under an episode if TV show
* Assets added under Main Pack throws error. No Main Pack name in dict
Features ToDo:
* Select in settings what types you wish to sync
* Print what's updated on entity-update
* Add listener for Edits
"""
import os
import threading
@ -5,6 +17,7 @@ import gazu
from openpype.client import get_project, get_assets, get_asset_by_name
from openpype.pipeline import AvalonMongoDB
from openpype.lib import Logger
from .credentials import validate_credentials
from .update_op_with_zou import (
create_op_asset,
@ -14,6 +27,8 @@ from .update_op_with_zou import (
update_op_assets,
)
log = Logger.get_logger(__name__)
class Listener:
"""Host Kitsu listener."""
@ -38,7 +53,7 @@ class Listener:
# Authenticate
if not validate_credentials(login, password):
raise gazu.exception.AuthFailedException(
f"Kitsu authentication failed for login: '{login}'..."
'Kitsu authentication failed for login: "{}"...'.format(login)
)
gazu.set_event_host(
@ -86,7 +101,9 @@ class Listener:
self.event_client, "sequence:delete", self._delete_sequence
)
gazu.events.add_listener(self.event_client, "shot:new", self._new_shot)
gazu.events.add_listener(
self.event_client, "shot:new", self._new_shot
)
gazu.events.add_listener(
self.event_client, "shot:update", self._update_shot
)
@ -94,7 +111,9 @@ class Listener:
self.event_client, "shot:delete", self._delete_shot
)
gazu.events.add_listener(self.event_client, "task:new", self._new_task)
gazu.events.add_listener(
self.event_client, "task:new", self._new_task
)
gazu.events.add_listener(
self.event_client, "task:update", self._update_task
)
@ -103,44 +122,62 @@ class Listener:
)
def start(self):
"""Start listening for events."""
log.info("Listening to Kitsu events...")
gazu.events.run_client(self.event_client)
def get_ep_dict(self, ep_id):
if ep_id and ep_id != "":
return gazu.entity.get_entity(ep_id)
return
# == Project ==
def _new_project(self, data):
"""Create new project into OP DB."""
# Use update process to avoid duplicating code
self._update_project(data)
self._update_project(data, new_project=True)
def _update_project(self, data):
def _update_project(self, data, new_project=False):
"""Update project into OP DB."""
# Get project entity
project = gazu.project.get_project(data["project_id"])
project_name = project["name"]
update_project = write_project_to_op(project, self.dbcon)
# Write into DB
if update_project:
self.dbcon.Session["AVALON_PROJECT"] = project_name
self.dbcon.Session["AVALON_PROJECT"] = get_kitsu_project_name(
data["project_id"]
)
self.dbcon.bulk_write([update_project])
if new_project:
log.info("Project created: {}".format(project["name"]))
def _delete_project(self, data):
"""Delete project."""
project_name = get_kitsu_project_name(data["project_id"])
collections = self.dbcon.database.list_collection_names()
for collection in collections:
project = self.dbcon.database[collection].find_one(
{"data.zou_id": data["project_id"]}
)
if project:
# Delete project collection
self.dbcon.database[project["name"]].drop()
# Delete project collection
self.dbcon.database[project_name].drop()
# Print message
log.info("Project deleted: {}".format(project["name"]))
return
# == Asset ==
def _new_asset(self, data):
"""Create new asset into OP DB."""
# Get project entity
set_op_project(self.dbcon, data["project_id"])
# Get gazu entity
# Get asset entity
asset = gazu.asset.get_asset(data["asset_id"])
# Insert doc in DB
@ -149,6 +186,21 @@ class Listener:
# Update
self._update_asset(data)
# Print message
ep_id = asset.get("episode_id")
ep = self.get_ep_dict(ep_id)
msg = (
"Asset created: {proj_name} - {ep_name}"
"{asset_type_name} - {asset_name}".format(
proj_name=asset["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
asset_type_name=asset["asset_type_name"],
asset_name=asset["name"],
)
)
log.info(msg)
def _update_asset(self, data):
"""Update asset into OP DB."""
set_op_project(self.dbcon, data["project_id"])
@ -166,10 +218,15 @@ class Listener:
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[asset["project_id"]] = project_doc
gazu_project = gazu.project.get_project(asset["project_id"])
# Update
update_op_result = update_op_assets(
self.dbcon, project_doc, [asset], zou_ids_and_asset_docs
self.dbcon,
gazu_project,
project_doc,
[asset],
zou_ids_and_asset_docs,
)
if update_op_result:
asset_doc_id, asset_update = update_op_result[0]
@ -179,10 +236,27 @@ class Listener:
"""Delete asset of OP DB."""
set_op_project(self.dbcon, data["project_id"])
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["asset_id"]}
)
asset = self.dbcon.find_one({"data.zou.id": data["asset_id"]})
if asset:
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["asset_id"]}
)
# Print message
ep_id = asset["data"]["zou"].get("episode_id")
ep = self.get_ep_dict(ep_id)
msg = (
"Asset deleted: {proj_name} - {ep_name}"
"{type_name} - {asset_name}".format(
proj_name=asset["data"]["zou"]["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
type_name=asset["data"]["zou"]["asset_type_name"],
asset_name=asset["name"],
)
)
log.info(msg)
# == Episode ==
def _new_episode(self, data):
@ -191,14 +265,20 @@ class Listener:
set_op_project(self.dbcon, data["project_id"])
# Get gazu entity
episode = gazu.shot.get_episode(data["episode_id"])
ep = gazu.shot.get_episode(data["episode_id"])
# Insert doc in DB
self.dbcon.insert_one(create_op_asset(episode))
self.dbcon.insert_one(create_op_asset(ep))
# Update
self._update_episode(data)
# Print message
msg = "Episode created: {proj_name} - {ep_name}".format(
proj_name=ep["project_name"], ep_name=ep["name"]
)
log.info(msg)
def _update_episode(self, data):
"""Update episode into OP DB."""
set_op_project(self.dbcon, data["project_id"])
@ -206,7 +286,7 @@ class Listener:
project_doc = get_project(project_name)
# Get gazu entity
episode = gazu.shot.get_episode(data["episode_id"])
ep = gazu.shot.get_episode(data["episode_id"])
# Find asset doc
# Query all assets of the local project
@ -215,11 +295,16 @@ class Listener:
for asset_doc in get_assets(project_name)
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[episode["project_id"]] = project_doc
zou_ids_and_asset_docs[ep["project_id"]] = project_doc
gazu_project = gazu.project.get_project(ep["project_id"])
# Update
update_op_result = update_op_assets(
self.dbcon, project_doc, [episode], zou_ids_and_asset_docs
self.dbcon,
gazu_project,
project_doc,
[ep],
zou_ids_and_asset_docs,
)
if update_op_result:
asset_doc_id, asset_update = update_op_result[0]
@ -228,12 +313,23 @@ class Listener:
def _delete_episode(self, data):
"""Delete shot of OP DB."""
set_op_project(self.dbcon, data["project_id"])
print("delete episode") # TODO check bugfix
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["episode_id"]}
)
ep = self.dbcon.find_one({"data.zou.id": data["episode_id"]})
if ep:
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["episode_id"]}
)
# Print message
project = gazu.project.get_project(
ep["data"]["zou"]["project_id"]
)
msg = "Episode deleted: {proj_name} - {ep_name}".format(
proj_name=project["name"], ep_name=ep["name"]
)
log.info(msg)
# == Sequence ==
def _new_sequence(self, data):
@ -250,6 +346,20 @@ class Listener:
# Update
self._update_sequence(data)
# Print message
ep_id = sequence.get("episode_id")
ep = self.get_ep_dict(ep_id)
msg = (
"Sequence created: {proj_name} - {ep_name}"
"{sequence_name}".format(
proj_name=sequence["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=sequence["name"],
)
)
log.info(msg)
def _update_sequence(self, data):
"""Update sequence into OP DB."""
set_op_project(self.dbcon, data["project_id"])
@ -267,10 +377,15 @@ class Listener:
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[sequence["project_id"]] = project_doc
gazu_project = gazu.project.get_project(sequence["project_id"])
# Update
update_op_result = update_op_assets(
self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs
self.dbcon,
gazu_project,
project_doc,
[sequence],
zou_ids_and_asset_docs,
)
if update_op_result:
asset_doc_id, asset_update = update_op_result[0]
@ -279,12 +394,30 @@ class Listener:
def _delete_sequence(self, data):
"""Delete sequence of OP DB."""
set_op_project(self.dbcon, data["project_id"])
print("delete sequence") # TODO check bugfix
sequence = self.dbcon.find_one({"data.zou.id": data["sequence_id"]})
if sequence:
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["sequence_id"]}
)
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["sequence_id"]}
)
# Print message
ep_id = sequence["data"]["zou"].get("episode_id")
ep = self.get_ep_dict(ep_id)
gazu_project = gazu.project.get_project(
sequence["data"]["zou"]["project_id"]
)
msg = (
"Sequence deleted: {proj_name} - {ep_name}"
"{sequence_name}".format(
proj_name=gazu_project["name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=sequence["name"],
)
)
log.info(msg)
# == Shot ==
def _new_shot(self, data):
@ -301,6 +434,21 @@ class Listener:
# Update
self._update_shot(data)
# Print message
ep_id = shot["episode_id"]
ep = self.get_ep_dict(ep_id)
msg = (
"Shot created: {proj_name} - {ep_name}"
"{sequence_name} - {shot_name}".format(
proj_name=shot["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=shot["sequence_name"],
shot_name=shot["name"],
)
)
log.info(msg)
def _update_shot(self, data):
"""Update shot into OP DB."""
set_op_project(self.dbcon, data["project_id"])
@ -318,11 +466,17 @@ class Listener:
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[shot["project_id"]] = project_doc
gazu_project = gazu.project.get_project(shot["project_id"])
# Update
update_op_result = update_op_assets(
self.dbcon, project_doc, [shot], zou_ids_and_asset_docs
self.dbcon,
gazu_project,
project_doc,
[shot],
zou_ids_and_asset_docs,
)
if update_op_result:
asset_doc_id, asset_update = update_op_result[0]
self.dbcon.update_one({"_id": asset_doc_id}, asset_update)
@ -330,11 +484,28 @@ class Listener:
def _delete_shot(self, data):
"""Delete shot of OP DB."""
set_op_project(self.dbcon, data["project_id"])
shot = self.dbcon.find_one({"data.zou.id": data["shot_id"]})
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["shot_id"]}
)
if shot:
# Delete
self.dbcon.delete_one(
{"type": "asset", "data.zou.id": data["shot_id"]}
)
# Print message
ep_id = shot["data"]["zou"].get("episode_id")
ep = self.get_ep_dict(ep_id)
msg = (
"Shot deleted: {proj_name} - {ep_name}"
"{sequence_name} - {shot_name}".format(
proj_name=shot["data"]["zou"]["project_name"],
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=shot["data"]["zou"]["sequence_name"],
shot_name=shot["name"],
)
)
log.info(msg)
# == Task ==
def _new_task(self, data):
@ -346,23 +517,59 @@ class Listener:
# Get gazu entity
task = gazu.task.get_task(data["task_id"])
# Find asset doc
parent_name = task["entity"]["name"]
# Print message
ep_id = task.get("episode_id")
ep = self.get_ep_dict(ep_id)
asset_doc = get_asset_by_name(project_name, parent_name)
parent_name = None
asset_name = None
ent_type = None
if task["task_type"]["for_entity"] == "Asset":
parent_name = task["entity"]["name"]
asset_name = task["entity"]["name"]
ent_type = task["entity_type"]["name"]
elif task["task_type"]["for_entity"] == "Shot":
parent_name = "{ep_name}{sequence_name} - {shot_name}".format(
ep_name=ep["name"] + " - " if ep is not None else "",
sequence_name=task["sequence"]["name"],
shot_name=task["entity"]["name"],
)
asset_name = "{ep_name}{sequence_name}_{shot_name}".format(
ep_name=ep["name"] + "_" if ep is not None else "",
sequence_name=task["sequence"]["name"],
shot_name=task["entity"]["name"],
)
# Update asset tasks with new one
asset_tasks = asset_doc["data"].get("tasks")
task_type_name = task["task_type"]["name"]
asset_tasks[task_type_name] = {"type": task_type_name, "zou": task}
self.dbcon.update_one(
{"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}}
)
asset_doc = get_asset_by_name(project_name, asset_name)
if asset_doc:
asset_tasks = asset_doc["data"].get("tasks")
task_type_name = task["task_type"]["name"]
asset_tasks[task_type_name] = {
"type": task_type_name,
"zou": task,
}
self.dbcon.update_one(
{"_id": asset_doc["_id"]},
{"$set": {"data.tasks": asset_tasks}},
)
# Print message
msg = (
"Task created: {proj} - {ent_type}{parent}"
" - {task}".format(
proj=task["project"]["name"],
ent_type=ent_type + " - " if ent_type is not None else "",
parent=parent_name,
task=task["task_type"]["name"],
)
)
log.info(msg)
def _update_task(self, data):
"""Update task into OP DB."""
# TODO is it necessary?
pass
def _delete_task(self, data):
"""Delete task of OP DB."""
@ -384,6 +591,31 @@ class Listener:
{"_id": doc["_id"]},
{"$set": {"data.tasks": asset_tasks}},
)
# Print message
entity = gazu.entity.get_entity(task["zou"]["entity_id"])
ep = self.get_ep_dict(entity["source_id"])
if entity["type"] == "Asset":
parent_name = "{ep}{entity_type} - {entity}".format(
ep=ep["name"] + " - " if ep is not None else "",
entity_type=task["zou"]["entity_type"]["name"],
entity=task["zou"]["entity"]["name"],
)
elif entity["type"] == "Shot":
parent_name = "{ep}{sequence} - {shot}".format(
ep=ep["name"] + " - " if ep is not None else "",
sequence=task["zou"]["sequence"]["name"],
shot=task["zou"]["entity"]["name"],
)
msg = "Task deleted: {proj} - {parent} - {task}".format(
proj=task["zou"]["project"]["name"],
parent=parent_name,
task=name,
)
log.info(msg)
return
@ -394,9 +626,10 @@ def start_listeners(login: str, password: str):
login (str): Kitsu user login
password (str): Kitsu user password
"""
# Refresh token every week
def refresh_token_every_week():
print("Refreshing token...")
log.info("Refreshing token...")
gazu.refresh_token()
threading.Timer(7 * 3600 * 24, refresh_token_every_week).start()

View file

@ -5,10 +5,6 @@ from typing import Dict, List
from pymongo import DeleteOne, UpdateOne
import gazu
from gazu.task import (
all_tasks_for_asset,
all_tasks_for_shot,
)
from openpype.client import (
get_project,
@ -18,7 +14,6 @@ from openpype.client import (
create_project,
)
from openpype.pipeline import AvalonMongoDB
from openpype.settings import get_project_settings
from openpype.modules.kitsu.utils.credentials import validate_credentials
from openpype.lib import Logger
@ -69,6 +64,7 @@ def set_op_project(dbcon: AvalonMongoDB, project_id: str):
def update_op_assets(
dbcon: AvalonMongoDB,
gazu_project: dict,
project_doc: dict,
entities_list: List[dict],
asset_doc_ids: Dict[str, dict],
@ -78,14 +74,18 @@ def update_op_assets(
Args:
dbcon (AvalonMongoDB): Connection to DB
gazu_project (dict): Dict of gazu,
project_doc (dict): Dict of project,
entities_list (List[dict]): List of zou entities to update
asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...]
Returns:
List[Dict[str, dict]]: List of (doc_id, update_dict) tuples
"""
if not project_doc:
return
project_name = project_doc["name"]
project_module_settings = get_project_settings(project_name)["kitsu"]
assets_with_update = []
for item in entities_list:
@ -94,7 +94,8 @@ def update_op_assets(
if not item_doc: # Create asset
op_asset = create_op_asset(item)
insert_result = dbcon.insert_one(op_asset)
item_doc = get_asset_by_id(project_name, insert_result.inserted_id)
item_doc = get_asset_by_id(
project_name, insert_result.inserted_id)
# Update asset
item_data = deepcopy(item_doc["data"])
@ -113,38 +114,73 @@ def update_op_assets(
except (TypeError, ValueError):
frame_in = 1001
item_data["frameStart"] = frame_in
# Frames duration, fallback on 0
# Frames duration, fallback on 1
try:
# NOTE nb_frames is stored directly in item
# because of zou's legacy design
frames_duration = int(item.get("nb_frames", 0))
frames_duration = int(item.get("nb_frames", 1))
except (TypeError, ValueError):
frames_duration = 0
frames_duration = None
# Frame out, fallback on frame_in + duration or project's value or 1001
frame_out = item_data.pop("frame_out", None)
if not frame_out:
frame_out = frame_in + frames_duration
try:
frame_out = int(frame_out)
except (TypeError, ValueError):
frame_out = 1001
if frames_duration:
frame_out = frame_in + frames_duration - 1
else:
frame_out = project_doc["data"].get("frameEnd", frame_in)
item_data["frameEnd"] = frame_out
# Fps, fallback to project's value or default value (25.0)
try:
fps = float(item_data.get("fps", project_doc["data"].get("fps")))
fps = float(item_data.get("fps"))
except (TypeError, ValueError):
fps = 25.0
fps = float(gazu_project.get(
"fps", project_doc["data"].get("fps", 25)))
item_data["fps"] = fps
# Resolution, fall back to project default
match_res = re.match(
r"(\d+)x(\d+)",
item_data.get("resolution", gazu_project.get("resolution"))
)
if match_res:
item_data["resolutionWidth"] = int(match_res.group(1))
item_data["resolutionHeight"] = int(match_res.group(2))
else:
item_data["resolutionWidth"] = project_doc["data"].get(
"resolutionWidth")
item_data["resolutionHeight"] = project_doc["data"].get(
"resolutionHeight")
# Properties that doesn't fully exist in Kitsu.
# Guessing those property names below:
# Pixel Aspect Ratio
item_data["pixelAspect"] = item_data.get(
"pixel_aspect", project_doc["data"].get("pixelAspect"))
# Handle Start
item_data["handleStart"] = item_data.get(
"handle_start", project_doc["data"].get("handleStart"))
# Handle End
item_data["handleEnd"] = item_data.get(
"handle_end", project_doc["data"].get("handleEnd"))
# Clip In
item_data["clipIn"] = item_data.get(
"clip_in", project_doc["data"].get("clipIn"))
# Clip Out
item_data["clipOut"] = item_data.get(
"clip_out", project_doc["data"].get("clipOut"))
# Tasks
tasks_list = []
item_type = item["type"]
if item_type == "Asset":
tasks_list = all_tasks_for_asset(item)
tasks_list = gazu.task.all_tasks_for_asset(item)
elif item_type == "Shot":
tasks_list = all_tasks_for_shot(item)
tasks_list = gazu.task.all_tasks_for_shot(item)
item_data["tasks"] = {
t["task_type_name"]: {"type": t["task_type_name"], "zou": t}
item_data["tasks"] = {
t["task_type_name"]: {
"type": t["task_type_name"],
"zou": gazu.task.get_task(t["id"]),
}
}
for t in tasks_list
}
@ -176,9 +212,14 @@ def update_op_assets(
entity_root_asset_name = "Shots"
# Root parent folder if exist
visual_parent_doc_id = (
asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
)
visual_parent_doc_id = None
if parent_zou_id is not None:
parent_zou_id_dict = asset_doc_ids.get(parent_zou_id)
if parent_zou_id_dict is not None:
visual_parent_doc_id = (
parent_zou_id_dict.get("_id")
if parent_zou_id_dict else None)
if visual_parent_doc_id is None:
# Find root folder doc ("Assets" or "Shots")
root_folder_doc = get_asset_by_name(
@ -197,12 +238,15 @@ def update_op_assets(
item_data["parents"] = []
ancestor_id = parent_zou_id
while ancestor_id is not None:
parent_doc = asset_doc_ids[ancestor_id]
item_data["parents"].insert(0, parent_doc["name"])
parent_doc = asset_doc_ids.get(ancestor_id)
if parent_doc is not None:
item_data["parents"].insert(0, parent_doc["name"])
# Get parent entity
parent_entity = parent_doc["data"]["zou"]
ancestor_id = parent_entity.get("parent_id")
# Get parent entity
parent_entity = parent_doc["data"]["zou"]
ancestor_id = parent_entity.get("parent_id")
else:
ancestor_id = None
# Build OpenPype compatible name
if item_type in ["Shot", "Sequence"] and parent_zou_id is not None:
@ -250,13 +294,12 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
UpdateOne: Update instance for the project
"""
project_name = project["name"]
project_doc = get_project(project_name)
if not project_doc:
log.info(f"Creating project '{project_name}'")
project_doc = create_project(project_name, project_name)
project_dict = get_project(project_name)
if not project_dict:
project_dict = create_project(project_name, project_name)
# Project data and tasks
project_data = project_doc["data"] or {}
project_data = project_dict["data"] or {}
# Build project code and update Kitsu
project_code = project.get("code")
@ -287,7 +330,7 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
)
return UpdateOne(
{"_id": project_doc["_id"]},
{"_id": project_dict["_id"]},
{
"$set": {
"config.tasks": {
@ -301,7 +344,8 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
)
def sync_all_projects(login: str, password: str, ignore_projects: list = None):
def sync_all_projects(
login: str, password: str, ignore_projects: list = None):
"""Update all OP projects in DB with Zou data.
Args:
@ -346,7 +390,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
if not project:
project = gazu.project.get_project_by_name(project["name"])
log.info(f"Synchronizing {project['name']}...")
log.info("Synchronizing {}...".format(project['name']))
# Get all assets from zou
all_assets = gazu.asset.all_assets_for_project(project)
@ -365,12 +409,16 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
]
# Sync project. Create if doesn't exist
project_name = project["name"]
project_dict = get_project(project_name)
if not project_dict:
log.info("Project created: {}".format(project_name))
bulk_writes.append(write_project_to_op(project, dbcon))
# Try to find project document
project_name = project["name"]
if not project_dict:
project_dict = get_project(project_name)
dbcon.Session["AVALON_PROJECT"] = project_name
project_doc = get_project(project_name)
# Query all assets of the local project
zou_ids_and_asset_docs = {
@ -378,7 +426,7 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
for asset_doc in get_assets(project_name)
if asset_doc["data"].get("zou", {}).get("id")
}
zou_ids_and_asset_docs[project["id"]] = project_doc
zou_ids_and_asset_docs[project["id"]] = project_dict
# Create entities root folders
to_insert = [
@ -389,6 +437,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
"data": {
"root_of": r,
"tasks": {},
"visualParent": None,
"parents": [],
},
}
for r in ["Assets", "Shots"]
@ -423,7 +473,8 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
[
UpdateOne({"_id": id}, update)
for id, update in update_op_assets(
dbcon, project_doc, all_entities, zou_ids_and_asset_docs
dbcon, project, project_dict,
all_entities, zou_ids_and_asset_docs
)
]
)

View file

@ -61,7 +61,7 @@ def sync_zou_from_op_project(
project_doc = get_project(project_name)
# Get all entities from zou
print(f"Synchronizing {project_name}...")
print("Synchronizing {}...".format(project_name))
zou_project = gazu.project.get_project_by_name(project_name)
# Create project
@ -82,7 +82,9 @@ def sync_zou_from_op_project(
f"x{project_doc['data']['resolutionHeight']}",
}
)
gazu.project.update_project_data(zou_project, data=project_doc["data"])
gazu.project.update_project_data(
zou_project, data=project_doc["data"]
)
gazu.project.update_project(zou_project)
asset_types = gazu.asset.all_asset_types()
@ -98,8 +100,7 @@ def sync_zou_from_op_project(
project_module_settings = get_project_settings(project_name)["kitsu"]
dbcon.Session["AVALON_PROJECT"] = project_name
asset_docs = {
asset_doc["_id"]: asset_doc
for asset_doc in get_assets(project_name)
asset_doc["_id"]: asset_doc for asset_doc in get_assets(project_name)
}
# Create new assets
@ -174,7 +175,9 @@ def sync_zou_from_op_project(
doc["name"],
frame_in=doc["data"]["frameStart"],
frame_out=doc["data"]["frameEnd"],
nb_frames=doc["data"]["frameEnd"] - doc["data"]["frameStart"],
nb_frames=(
doc["data"]["frameEnd"] - doc["data"]["frameStart"] + 1
),
)
elif match.group(2): # Sequence
@ -229,7 +232,7 @@ def sync_zou_from_op_project(
"frame_in": frame_in,
"frame_out": frame_out,
},
"nb_frames": frame_out - frame_in,
"nb_frames": frame_out - frame_in + 1,
}
)
entity = gazu.raw.update("entities", zou_id, entity_data)
@ -258,7 +261,7 @@ def sync_zou_from_op_project(
for asset_doc in asset_docs.values()
}
for entity_id in deleted_entities:
gazu.raw.delete(f"data/entities/{entity_id}")
gazu.raw.delete("data/entities/{}".format(entity_id))
# Write into DB
if bulk_writes:

View file

@ -52,7 +52,6 @@
"enabled": true,
"optional": false,
"active": true,
"use_published": true,
"priority": 50,
"chunk_size": 10,
"concurrent_tasks": 1,

View file

@ -139,7 +139,8 @@
"ext": "mp4",
"tags": [
"burnin",
"ftrackreview"
"ftrackreview",
"kitsureview"
],
"burnins": [],
"ffmpeg_args": {

View file

@ -285,11 +285,6 @@
"key": "active",
"label": "Active"
},
{
"type": "boolean",
"key": "use_published",
"label": "Use Published scene"
},
{
"type": "splitter"
},

View file

@ -16,6 +16,9 @@
{
"shotgridreview": "Add review to Shotgrid"
},
{
"kitsureview": "Add review to Kitsu"
},
{
"delete": "Delete output"
},

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.2-nightly.3"
__version__ = "3.15.2-nightly.4"