mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
[Automated] Merged develop into main
This commit is contained in: commit f62c7b5c53
12 changed files with 688 additions and 686 deletions
@@ -1,432 +0,0 @@
# -*- coding: utf-8 -*-
"""Extract Harmony scene from zip file."""
import glob
import os
import shutil
import six
import sys
import tempfile
import zipfile

import pyblish.api
from avalon import api, io
import openpype.api
from openpype.lib import get_workfile_template_key


class ExtractHarmonyZip(openpype.api.Extractor):
    """Extract Harmony zip."""

    # Pyblish settings
    label = "Extract Harmony zip"
    order = pyblish.api.ExtractorOrder + 0.02
    hosts = ["standalonepublisher"]
    families = ["scene"]

    # Properties
    session = None
    task_types = None
    task_statuses = None
    assetversion_statuses = None

    # Presets
    create_workfile = True
    default_task = {
        "name": "harmonyIngest",
        "type": "Ingest",
    }
    default_task_status = "Ingested"
    assetversion_status = "Ingested"

    def process(self, instance):
        """Plugin entry point."""
        context = instance.context
        self.session = context.data["ftrackSession"]
        asset_doc = context.data["assetEntity"]
        # asset_name = instance.data["asset"]
        subset_name = instance.data["subset"]
        instance_name = instance.data["name"]
        family = instance.data["family"]
        task = context.data["anatomyData"]["task"] or self.default_task
        project_entity = instance.context.data["projectEntity"]
        ftrack_id = asset_doc["data"]["ftrackId"]
        repres = instance.data["representations"]
        submitted_staging_dir = repres[0]["stagingDir"]
        submitted_files = repres[0]["files"]

        # Get all the ftrack entities needed

        # Asset Entity
        query = 'AssetBuild where id is "{}"'.format(ftrack_id)
        asset_entity = self.session.query(query).first()

        # Project Entity
        query = 'Project where full_name is "{}"'.format(
            project_entity["name"]
        )
        project_entity = self.session.query(query).one()

        # Get Task types and Statuses for creation if needed
        self.task_types = self._get_all_task_types(project_entity)
        self.task_statuses = self._get_all_task_statuses(project_entity)

        # Get Statuses of AssetVersions
        self.assetversion_statuses = self._get_all_assetversion_statuses(
            project_entity
        )

        # Setup the status that we want for the AssetVersion
        if self.assetversion_status:
            instance.data["assetversion_status"] = self.assetversion_status

        # Create the default_task if it does not exist
        if task == self.default_task:
            existing_tasks = []
            entity_children = asset_entity.get('children', [])
            for child in entity_children:
                if child.entity_type.lower() == 'task':
                    existing_tasks.append(child['name'].lower())

            if task.lower() in existing_tasks:
                print("Task {} already exists".format(task))

            else:
                self.create_task(
                    name=task,
                    task_type=self.default_task_type,
                    task_status=self.default_task_status,
                    parent=asset_entity,
                )

        # Find latest version
        latest_version = self._find_last_version(subset_name, asset_doc)
        version_number = 1
        if latest_version is not None:
            version_number += latest_version

        self.log.info(
            "Next version of instance \"{}\" will be {}".format(
                instance_name, version_number
            )
        )

        # update instance info
        instance.data["task"] = task
        instance.data["version_name"] = "{}_{}".format(subset_name, task)
        instance.data["family"] = family
        instance.data["subset"] = subset_name
        instance.data["version"] = version_number
        instance.data["latestVersion"] = latest_version
        instance.data["anatomyData"].update({
            "subset": subset_name,
            "family": family,
            "version": version_number
        })

        # Copy `families` and check if `family` is not in current families
        families = instance.data.get("families") or list()
        if families:
            families = list(set(families))

        instance.data["families"] = families

        # Prepare staging dir for new instance and zip + sanitize scene name
        staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_")

        # Handle if the representation is a .zip and not an .xstage
        pre_staged = False
        if submitted_files.endswith(".zip"):
            submitted_zip_file = os.path.join(submitted_staging_dir,
                                              submitted_files
                                              ).replace("\\", "/")

            pre_staged = self.sanitize_prezipped_project(instance,
                                                         submitted_zip_file,
                                                         staging_dir)

        # Get the file to work with
        source_dir = str(repres[0]["stagingDir"])
        source_file = str(repres[0]["files"])

        staging_scene_dir = os.path.join(staging_dir, "scene")
        staging_scene = os.path.join(staging_scene_dir, source_file)

        # If the file is an .xstage / directory, we must stage it
        if not pre_staged:
            shutil.copytree(source_dir, staging_scene_dir)

        # Rename this latest file as 'scene.xstage'
        # This is determined in the collector from the latest scene in a
        # submitted directory / directory the submitted .xstage is in.
        # In the case of a zip file being submitted, this is determined within
        # the self.sanitize_prezipped_project() method in this extractor.
        os.rename(staging_scene,
                  os.path.join(staging_scene_dir, "scene.xstage")
                  )

        # Required to set the current directory where the zip will end up
        os.chdir(staging_dir)

        # Create the zip file
        zip_filepath = shutil.make_archive(os.path.basename(source_dir),
                                           "zip",
                                           staging_scene_dir
                                           )

        zip_filename = os.path.basename(zip_filepath)

        self.log.info("Zip file: {}".format(zip_filepath))

        # Setup representation
        new_repre = {
            "name": "zip",
            "ext": "zip",
            "files": zip_filename,
            "stagingDir": staging_dir
        }

        self.log.debug(
            "Creating new representation: {}".format(new_repre)
        )
        instance.data["representations"] = [new_repre]

        self.log.debug("Completed prep of zipped Harmony scene: {}"
                       .format(zip_filepath)
                       )

        # If this extractor is set up to also extract a workfile...
        if self.create_workfile:
            workfile_path = self.extract_workfile(instance,
                                                  staging_scene
                                                  )

            self.log.debug("Extracted Workfile to: {}".format(workfile_path))

    def extract_workfile(self, instance, staging_scene):
        """Extract a valid workfile for this corresponding publish.

        Args:
            instance (:class:`pyblish.api.Instance`): Instance data.
            staging_scene (str): path of staging scene.

        Returns:
            str: Path to workdir.

        """
        # Since the staging scene was renamed to "scene.xstage" for publish
        # rename the staging scene in the temp stagingdir
        staging_scene = os.path.join(os.path.dirname(staging_scene),
                                     "scene.xstage")

        # Setup the data needed to form a valid work path filename
        anatomy = openpype.api.Anatomy()
        project_entity = instance.context.data["projectEntity"]
        asset_entity = io.find_one({
            "type": "asset",
            "name": instance.data["asset"]
        })

        task_name = instance.data.get("task")
        task_type = asset_entity["data"]["tasks"][task_name].get("type")

        if task_type:
            task_short = project_entity["config"]["tasks"].get(
                task_type, {}).get("short_name")
        else:
            task_short = None

        data = {
            "root": api.registered_root(),
            "project": {
                "name": project_entity["name"],
                "code": project_entity["data"].get("code", '')
            },
            "asset": instance.data["asset"],
            "hierarchy": openpype.api.get_hierarchy(instance.data["asset"]),
            "family": instance.data["family"],
            "task": {
                "name": task_name,
                "type": task_type,
                "short": task_short,
            },
            "subset": instance.data["subset"],
            "version": 1,
            "ext": "zip",
        }
        host_name = "harmony"
        template_name = get_workfile_template_key(
            instance.data.get("task").get("type"),
            host_name,
            project_name=project_entity["name"],
        )

        # Get a valid work filename first with version 1
        file_template = anatomy.templates[template_name]["file"]
        anatomy_filled = anatomy.format(data)
        work_path = anatomy_filled[template_name]["path"]

        # Get the final work filename with the proper version
        data["version"] = api.last_workfile_with_version(
            os.path.dirname(work_path),
            file_template,
            data,
            api.HOST_WORKFILE_EXTENSIONS[host_name]
        )[1]

        base_name = os.path.splitext(os.path.basename(work_path))[0]

        staging_work_path = os.path.join(os.path.dirname(staging_scene),
                                         base_name + ".xstage"
                                         )

        # Rename this latest file after the workfile path filename
        os.rename(staging_scene, staging_work_path)

        # Required to set the current directory where the zip will end up
        os.chdir(os.path.dirname(os.path.dirname(staging_scene)))

        # Create the zip file
        zip_filepath = shutil.make_archive(base_name,
                                           "zip",
                                           os.path.dirname(staging_scene)
                                           )
        self.log.info(staging_scene)
        self.log.info(work_path)
        self.log.info(staging_work_path)
        self.log.info(os.path.dirname(os.path.dirname(staging_scene)))
        self.log.info(base_name)
        self.log.info(zip_filepath)

        # Create the work path on disk if it does not exist
        os.makedirs(os.path.dirname(work_path), exist_ok=True)
        shutil.copy(zip_filepath, work_path)

        return work_path

    def sanitize_prezipped_project(
            self, instance, zip_filepath, staging_dir):
        """Fix when a zip contains a folder.

        Handle zip file root contains folder instead of the project.

        Args:
            instance (:class:`pyblish.api.Instance`): Instance data.
            zip_filepath (str): Path to zip.
            staging_dir (str): Path to staging directory.

        """
        zip = zipfile.ZipFile(zip_filepath)
        zip_contents = zipfile.ZipFile.namelist(zip)

        # Determine if any xstage file is in root of zip
        project_in_root = [pth for pth in zip_contents
                           if "/" not in pth and pth.endswith(".xstage")]

        staging_scene_dir = os.path.join(staging_dir, "scene")

        # The project is nested, so we must extract and move it
        if not project_in_root:

            staging_tmp_dir = os.path.join(staging_dir, "tmp")

            with zipfile.ZipFile(zip_filepath, "r") as zip_ref:
                zip_ref.extractall(staging_tmp_dir)

            nested_project_folder = os.path.join(staging_tmp_dir,
                                                 zip_contents[0]
                                                 )

            shutil.copytree(nested_project_folder, staging_scene_dir)

        else:
            # The project is not nested, so we just extract to scene folder
            with zipfile.ZipFile(zip_filepath, "r") as zip_ref:
                zip_ref.extractall(staging_scene_dir)

        latest_file = max(glob.iglob(staging_scene_dir + "/*.xstage"),
                          key=os.path.getctime).replace("\\", "/")

        instance.data["representations"][0]["stagingDir"] = staging_scene_dir
        instance.data["representations"][0]["files"] = os.path.basename(
            latest_file)

        # We have staged the scene already so return True
        return True

    def _find_last_version(self, subset_name, asset_doc):
        """Find last version of subset."""
        subset_doc = io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset_doc["_id"]
        })

        if subset_doc is None:
            self.log.debug("Subset entity does not exist yet.")
        else:
            version_doc = io.find_one(
                {
                    "type": "version",
                    "parent": subset_doc["_id"]
                },
                sort=[("name", -1)]
            )
            if version_doc:
                return int(version_doc["name"])
        return None

    def _get_all_task_types(self, project):
        """Get all task types."""
        tasks = {}
        proj_template = project['project_schema']
        temp_task_types = proj_template['_task_type_schema']['types']

        for type in temp_task_types:
            if type['name'] not in tasks:
                tasks[type['name']] = type

        return tasks

    def _get_all_task_statuses(self, project):
        """Get all statuses of tasks."""
        statuses = {}
        proj_template = project['project_schema']
        temp_task_statuses = proj_template.get_statuses("Task")

        for status in temp_task_statuses:
            if status['name'] not in statuses:
                statuses[status['name']] = status

        return statuses

    def _get_all_assetversion_statuses(self, project):
        """Get statuses of all asset versions."""
        statuses = {}
        proj_template = project['project_schema']
        temp_task_statuses = proj_template.get_statuses("AssetVersion")

        for status in temp_task_statuses:
            if status['name'] not in statuses:
                statuses[status['name']] = status

        return statuses

    def _create_task(self, name, task_type, parent, task_status):
        """Create task."""
        task_data = {
            'name': name,
            'parent': parent,
        }
        self.log.info(task_type)
        task_data['type'] = self.task_types[task_type]
        task_data['status'] = self.task_statuses[task_status]
        self.log.info(task_data)
        task = self.session.create('Task', task_data)
        try:
            self.session.commit()
        except Exception:
            tp, value, tb = sys.exc_info()
            self.session.rollback()
            six.reraise(tp, value, tb)

        return task
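For orientation, the deleted extractor's core flow is: copy the scene folder into a temp staging directory, rename the entry file to scene.xstage, then zip it. A minimal standalone sketch of that pattern, with illustrative names that are not part of the plugin:

import os
import shutil
import tempfile

def zip_harmony_scene(source_dir, scene_file):
    # Stage the scene folder under a fresh temp directory.
    staging_dir = tempfile.mkdtemp(prefix="pyblish_tmp_")
    staging_scene_dir = os.path.join(staging_dir, "scene")
    shutil.copytree(source_dir, staging_scene_dir)
    # Harmony opens the file named "scene.xstage" by convention.
    os.rename(
        os.path.join(staging_scene_dir, scene_file),
        os.path.join(staging_scene_dir, "scene.xstage")
    )
    # make_archive writes relative to the current directory.
    os.chdir(staging_dir)
    return shutil.make_archive(
        os.path.basename(source_dir), "zip", staging_scene_dir
    )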
openpype/hosts/tvpaint/plugins/load/load_workfile.py (new file, 102 lines)
@@ -0,0 +1,102 @@
import getpass
import os

from avalon.tvpaint import lib, pipeline, get_current_workfile_context
from avalon import api, io
from openpype.lib import (
    get_workfile_template_key_from_context,
    get_workdir_data
)
from openpype.api import Anatomy


class LoadWorkfile(pipeline.Loader):
    """Load workfile."""

    families = ["workfile"]
    representations = ["tvpp"]

    label = "Load Workfile"

    def load(self, context, name, namespace, options):
        # Load context of current workfile as first thing
        # - which context and extension it has
        host = api.registered_host()
        current_file = host.current_file()

        context = get_current_workfile_context()

        filepath = self.fname.replace("\\", "/")

        if not os.path.exists(filepath):
            raise FileExistsError(
                "The loaded file does not exist. Try downloading it first."
            )

        george_script = "tv_LoadProject '\"'\"{}\"'\"'".format(
            filepath
        )
        lib.execute_george_through_file(george_script)

        # Save workfile.
        host_name = "tvpaint"
        asset_name = context.get("asset")
        task_name = context.get("task")
        # For cases when there is a workfile without context
        if not asset_name:
            asset_name = io.Session["AVALON_ASSET"]
            task_name = io.Session["AVALON_TASK"]

        project_doc = io.find_one({
            "type": "project"
        })
        asset_doc = io.find_one({
            "type": "asset",
            "name": asset_name
        })
        project_name = project_doc["name"]

        template_key = get_workfile_template_key_from_context(
            asset_name,
            task_name,
            host_name,
            project_name=project_name,
            dbcon=io
        )
        anatomy = Anatomy(project_name)

        data = get_workdir_data(project_doc, asset_doc, task_name, host_name)
        data["root"] = anatomy.roots
        data["user"] = getpass.getuser()

        template = anatomy.templates[template_key]["file"]

        # Define saving file extension
        if current_file:
            # Match the extension of current file
            _, extension = os.path.splitext(current_file)
        else:
            # Fall back to the first extension supported for this host.
            extension = host.file_extensions()[0]

        data["ext"] = extension

        work_root = api.format_template_with_optional_keys(
            data, anatomy.templates[template_key]["folder"]
        )
        version = api.last_workfile_with_version(
            work_root, template, data, host.file_extensions()
        )[1]

        if version is None:
            version = 1
        else:
            version += 1

        data["version"] = version

        path = os.path.join(
            work_root,
            api.format_template_with_optional_keys(data, template)
        )
        host.save_file(path)
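The version bump at the end relies on avalon's last_workfile_with_version. As a rough mental model only (a simplified sketch, not the actual implementation), it finds the highest "v<number>" among workfiles with a supported extension:

import os
import re

def last_workfile_version(work_root, extensions):
    # Simplified stand-in: highest "v<number>" among workfiles on disk.
    version = None
    if not os.path.isdir(work_root):
        return version
    for filename in os.listdir(work_root):
        if not filename.lower().endswith(tuple(extensions)):
            continue
        match = re.search(r"v(\d+)", filename)
        if match:
            found = int(match.group(1))
            if version is None or found > version:
                version = found
    return version

# The loader then saves with version + 1, or 1 when nothing exists yet.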
@@ -258,19 +258,40 @@ def get_hierarchy(asset_name=None):
return "/".join(hierarchy_items)
|
||||
|
||||
|
||||
@with_avalon
|
||||
def get_linked_assets(asset_entity):
|
||||
"""Return linked assets for `asset_entity` from DB
|
||||
def get_linked_asset_ids(asset_doc):
|
||||
"""Return linked asset ids for `asset_doc` from DB
|
||||
|
||||
Args:
|
||||
asset_entity (dict): asset document from DB
|
||||
Args:
|
||||
asset_doc (dict): Asset document from DB.
|
||||
|
||||
Returns:
|
||||
(list) of MongoDB documents
|
||||
Returns:
|
||||
(list): MongoDB ids of input links.
|
||||
"""
|
||||
inputs = asset_entity["data"].get("inputs", [])
|
||||
inputs = [avalon.io.find_one({"_id": x}) for x in inputs]
|
||||
return inputs
|
||||
output = []
|
||||
if not asset_doc:
|
||||
return output
|
||||
|
||||
input_links = asset_doc["data"].get("inputsLinks") or []
|
||||
if input_links:
|
||||
output = [item["_id"] for item in input_links]
|
||||
return output
|
||||
|
||||
|
||||
@with_avalon
|
||||
def get_linked_assets(asset_doc):
|
||||
"""Return linked assets for `asset_doc` from DB
|
||||
|
||||
Args:
|
||||
asset_doc (dict): Asset document from DB
|
||||
|
||||
Returns:
|
||||
(list) Asset documents of input links for passed asset doc.
|
||||
"""
|
||||
link_ids = get_linked_asset_ids(asset_doc)
|
||||
if not link_ids:
|
||||
return []
|
||||
|
||||
return list(avalon.io.find({"_id": {"$in": link_ids}}))
|
||||
|
||||
|
||||
@with_avalon
|
||||
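A hypothetical usage of the two new helpers (the asset name is illustrative): get_linked_asset_ids returns only the ids stored on the document, while get_linked_assets resolves them to documents with a single $in query.

asset_doc = avalon.io.find_one({"type": "asset", "name": "char_hero"})
link_ids = get_linked_asset_ids(asset_doc)    # [ObjectId, ...]
linked_docs = get_linked_assets(asset_doc)    # [asset document, ...]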
@@ -71,18 +71,24 @@ def ffprobe_streams(path_to_file, logger=None):
"Getting information about input \"{}\".".format(path_to_file)
|
||||
)
|
||||
args = [
|
||||
"\"{}\"".format(get_ffmpeg_tool_path("ffprobe")),
|
||||
"-v quiet",
|
||||
"-print_format json",
|
||||
get_ffmpeg_tool_path("ffprobe"),
|
||||
"-hide_banner",
|
||||
"-loglevel", "fatal",
|
||||
"-show_error",
|
||||
"-show_format",
|
||||
"-show_streams",
|
||||
"\"{}\"".format(path_to_file)
|
||||
"-show_programs",
|
||||
"-show_chapters",
|
||||
"-show_private_data",
|
||||
"-print_format", "json",
|
||||
path_to_file
|
||||
]
|
||||
command = " ".join(args)
|
||||
logger.debug("FFprobe command: \"{}\"".format(command))
|
||||
|
||||
logger.debug("FFprobe command: {}".format(
|
||||
subprocess.list2cmdline(args)
|
||||
))
|
||||
popen = subprocess.Popen(
|
||||
command,
|
||||
shell=True,
|
||||
args,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE
|
||||
)
|
||||
|
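The switch from a shell-joined string to an argument vector removes manual quoting; paths containing spaces or quotes pass through verbatim. A minimal sketch of the pattern (assuming ffprobe is on PATH):

import subprocess

path_to_file = "shot with spaces.mov"

# Argument vector: no shell, no quoting, arguments passed verbatim.
popen = subprocess.Popen(
    ["ffprobe", "-show_format", "-print_format", "json", path_to_file],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE
)
stdout, stderr = popen.communicate()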
@@ -288,6 +288,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"pluginInfo", {})
|
||||
)
|
||||
|
||||
self.limit_groups = (
|
||||
context.data["project_settings"].get(
|
||||
"deadline", {}).get(
|
||||
"publish", {}).get(
|
||||
"MayaSubmitDeadline", {}).get(
|
||||
"limit", [])
|
||||
)
|
||||
|
||||
self.group = (
|
||||
context.data["project_settings"].get(
|
||||
"deadline", {}).get(
|
||||
"publish", {}).get(
|
||||
"MayaSubmitDeadline", {}).get(
|
||||
"group", "none")
|
||||
)
|
||||
|
||||
context = instance.context
|
||||
workspace = context.data["workspaceDir"]
|
||||
anatomy = context.data['anatomy']
|
||||
|
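The chained .get() lookups above tolerate any missing level of the settings document. The same lookup can be written generically; a sketch under that assumption (the helper name is illustrative, not an OpenPype API):

def nested_setting(settings, keys, default=None):
    # Walk the nested dicts, falling back to `default` on any miss.
    value = settings
    for key in keys:
        if not isinstance(value, dict):
            return default
        value = value.get(key, default)
    return value

limit = nested_setting(
    {"deadline": {"publish": {"MayaSubmitDeadline": {"limit": ["cache"]}}}},
    ["deadline", "publish", "MayaSubmitDeadline", "limit"],
    default=[]
)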
@@ -0,0 +1,147 @@
from pymongo import UpdateOne
from bson.objectid import ObjectId

from avalon.api import AvalonMongoDB

from openpype_modules.ftrack.lib import (
    CUST_ATTR_ID_KEY,
    query_custom_attributes,

    BaseEvent
)


class SyncLinksToAvalon(BaseEvent):
    """Synchronize input links to avalon documents."""
    # Run after sync to avalon event handler
    priority = 110

    def __init__(self, session):
        self.dbcon = AvalonMongoDB()

        super(SyncLinksToAvalon, self).__init__(session)

    def launch(self, session, event):
        # Try to commit and if any error happen then recreate session
        entities_info = event["data"]["entities"]
        dependency_changes = []
        removed_entities = set()
        for entity_info in entities_info:
            action = entity_info.get("action")
            entityType = entity_info.get("entityType")
            if action not in ("remove", "add"):
                continue

            if entityType == "task":
                removed_entities.add(entity_info["entityId"])
            elif entityType == "dependency":
                dependency_changes.append(entity_info)

        # Care only about dependency changes
        if not dependency_changes:
            return

        project_id = None
        for entity_info in dependency_changes:
            for parent_info in entity_info["parents"]:
                if parent_info["entityType"] == "show":
                    project_id = parent_info["entityId"]
            if project_id is not None:
                break

        changed_to_ids = set()
        for entity_info in dependency_changes:
            to_id_change = entity_info["changes"]["to_id"]
            if to_id_change["new"] is not None:
                changed_to_ids.add(to_id_change["new"])

            if to_id_change["old"] is not None:
                changed_to_ids.add(to_id_change["old"])

        self._update_in_links(session, changed_to_ids, project_id)

    def _update_in_links(self, session, ftrack_ids, project_id):
        if not ftrack_ids or project_id is None:
            return

        attr_def = session.query((
            "select id from CustomAttributeConfiguration where key is \"{}\""
        ).format(CUST_ATTR_ID_KEY)).first()
        if attr_def is None:
            return

        project_entity = session.query((
            "select full_name from Project where id is \"{}\""
        ).format(project_id)).first()
        if not project_entity:
            return

        project_name = project_entity["full_name"]
        mongo_id_by_ftrack_id = self._get_mongo_ids_by_ftrack_ids(
            session, attr_def["id"], ftrack_ids
        )

        filtered_ftrack_ids = tuple(mongo_id_by_ftrack_id.keys())
        context_links = session.query((
            "select from_id, to_id from TypedContextLink where to_id in ({})"
        ).format(self.join_query_keys(filtered_ftrack_ids))).all()

        mapping_by_to_id = {
            ftrack_id: set()
            for ftrack_id in filtered_ftrack_ids
        }
        all_from_ids = set()
        for context_link in context_links:
            to_id = context_link["to_id"]
            from_id = context_link["from_id"]
            if from_id == to_id:
                continue
            all_from_ids.add(from_id)
            mapping_by_to_id[to_id].add(from_id)

        mongo_id_by_ftrack_id.update(self._get_mongo_ids_by_ftrack_ids(
            session, attr_def["id"], all_from_ids
        ))
        self.log.info(mongo_id_by_ftrack_id)
        bulk_writes = []
        for to_id, from_ids in mapping_by_to_id.items():
            dst_mongo_id = mongo_id_by_ftrack_id[to_id]
            links = []
            for ftrack_id in from_ids:
                link_mongo_id = mongo_id_by_ftrack_id.get(ftrack_id)
                if link_mongo_id is None:
                    continue

                links.append({
                    "_id": ObjectId(link_mongo_id),
                    "linkedBy": "ftrack",
                    "type": "breakdown"
                })

            bulk_writes.append(UpdateOne(
                {"_id": ObjectId(dst_mongo_id)},
                {"$set": {"data.inputLinks": links}}
            ))

        if bulk_writes:
            self.dbcon.database[project_name].bulk_write(bulk_writes)

    def _get_mongo_ids_by_ftrack_ids(self, session, attr_id, ftrack_ids):
        output = query_custom_attributes(
            session, [attr_id], ftrack_ids
        )
        mongo_id_by_ftrack_id = {}
        for item in output:
            mongo_id = item["value"]
            if not mongo_id:
                continue

            ftrack_id = item["entity_id"]

            mongo_id_by_ftrack_id[ftrack_id] = mongo_id
        return mongo_id_by_ftrack_id


def register(session):
    '''Register plugin. Called when used as a plugin.'''
    SyncLinksToAvalon(session).register()
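The handler batches all link updates into a single bulk_write instead of one round trip per document. A minimal sketch of that pymongo pattern (connection string, database, and id are placeholders):

from bson.objectid import ObjectId
from pymongo import MongoClient, UpdateOne

client = MongoClient("mongodb://localhost:27017")
collection = client["avalon"]["my_project"]

bulk_writes = [
    UpdateOne(
        {"_id": ObjectId("624f1e6e8c2f4b0a9c5d1234")},
        {"$set": {"data.inputLinks": []}}
    )
]
if bulk_writes:
    # One round trip applies every queued operation.
    collection.bulk_write(bulk_writes)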
@@ -22,7 +22,7 @@ from .custom_attributes import get_openpype_attr
 from bson.objectid import ObjectId
 from bson.errors import InvalidId
-from pymongo import UpdateOne
+from pymongo import UpdateOne, ReplaceOne
 import ftrack_api

 log = Logger.get_logger(__name__)
@@ -328,7 +328,7 @@ class SyncEntitiesFactory:
             server_url=self._server_url,
             api_key=self._api_key,
             api_user=self._api_user,
-            auto_connect_event_hub=True
+            auto_connect_event_hub=False
         )

         self.duplicates = {}
@@ -341,6 +341,7 @@ class SyncEntitiesFactory:
         }

         self.create_list = []
+        self.unarchive_list = []
         self.updates = collections.defaultdict(dict)

         self.avalon_project = None
@@ -1169,16 +1170,43 @@ class SyncEntitiesFactory:
                 entity
             )

+    def _get_input_links(self, ftrack_ids):
+        tupled_ids = tuple(ftrack_ids)
+        mapping_by_to_id = {
+            ftrack_id: set()
+            for ftrack_id in tupled_ids
+        }
+        ids_len = len(tupled_ids)
+        chunk_size = int(5000 / ids_len)
+        all_links = []
+        for idx in range(0, ids_len, chunk_size):
+            entity_ids_joined = join_query_keys(
+                tupled_ids[idx:idx + chunk_size]
+            )
+
+            all_links.extend(self.session.query((
+                "select from_id, to_id from"
+                " TypedContextLink where to_id in ({})"
+            ).format(entity_ids_joined)).all())
+
+        for context_link in all_links:
+            to_id = context_link["to_id"]
+            from_id = context_link["from_id"]
+            if from_id == to_id:
+                continue
+            mapping_by_to_id[to_id].add(from_id)
+        return mapping_by_to_id
+
     def prepare_ftrack_ent_data(self):
         not_set_ids = []
-        for id, entity_dict in self.entities_dict.items():
+        for ftrack_id, entity_dict in self.entities_dict.items():
             entity = entity_dict["entity"]
             if entity is None:
-                not_set_ids.append(id)
+                not_set_ids.append(ftrack_id)
                 continue

-            self.entities_dict[id]["final_entity"] = {}
-            self.entities_dict[id]["final_entity"]["name"] = (
+            self.entities_dict[ftrack_id]["final_entity"] = {}
+            self.entities_dict[ftrack_id]["final_entity"]["name"] = (
                 entity_dict["name"]
             )
             data = {}
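_get_input_links splits the id set into chunks so a single query string never carries too many ids. A generic sketch of the idea with a fixed chunk size (the method above derives its size from a 5000-item budget instead):

def iter_chunks(ids, chunk_size=100):
    # Yield fixed-size slices so no single query grows unbounded.
    ids = tuple(ids)
    for idx in range(0, len(ids), chunk_size):
        yield ids[idx:idx + chunk_size]

for chunk in iter_chunks(("id-a", "id-b", "id-c"), chunk_size=2):
    print(", ".join(chunk))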
@@ -1191,58 +1219,59 @@ class SyncEntitiesFactory:
             for key, val in entity_dict.get("hier_attrs", []).items():
                 data[key] = val

-            if id == self.ft_project_id:
-                project_name = entity["full_name"]
-                data["code"] = entity["name"]
-                self.entities_dict[id]["final_entity"]["data"] = data
-                self.entities_dict[id]["final_entity"]["type"] = "project"
-
-                proj_schema = entity["project_schema"]
-                task_types = proj_schema["_task_type_schema"]["types"]
-                proj_apps, warnings = get_project_apps(
-                    data.pop("applications", [])
-                )
-                for msg, items in warnings.items():
-                    if not msg or not items:
-                        continue
-                    self.report_items["warning"][msg] = items
-
-                current_project_anatomy_data = get_anatomy_settings(
-                    project_name, exclude_locals=True
-                )
-                anatomy_tasks = current_project_anatomy_data["tasks"]
-                tasks = {}
-                default_type_data = {
-                    "short_name": ""
-                }
-                for task_type in task_types:
-                    task_type_name = task_type["name"]
-                    tasks[task_type_name] = copy.deepcopy(
-                        anatomy_tasks.get(task_type_name)
-                        or default_type_data
-                    )
-
-                project_config = {
-                    "tasks": tasks,
-                    "apps": proj_apps
-                }
-                for key, value in current_project_anatomy_data.items():
-                    if key in project_config or key == "attributes":
-                        continue
-                    project_config[key] = value
-
-                self.entities_dict[id]["final_entity"]["config"] = (
-                    project_config
-                )
-
-            else:
-                ent_path_items = [ent["name"] for ent in entity["link"]]
-                parents = ent_path_items[1:len(ent_path_items) - 1:]
-
-                data["parents"] = parents
-                data["tasks"] = self.entities_dict[id].pop("tasks", {})
-                self.entities_dict[id]["final_entity"]["data"] = data
-                self.entities_dict[id]["final_entity"]["type"] = "asset"
+            if ftrack_id != self.ft_project_id:
+                ent_path_items = [ent["name"] for ent in entity["link"]]
+                parents = ent_path_items[1:len(ent_path_items) - 1:]
+
+                data["parents"] = parents
+                data["tasks"] = self.entities_dict[ftrack_id].pop("tasks", {})
+                self.entities_dict[ftrack_id]["final_entity"]["data"] = data
+                self.entities_dict[ftrack_id]["final_entity"]["type"] = "asset"
+                continue
+
+            project_name = entity["full_name"]
+            data["code"] = entity["name"]
+            self.entities_dict[ftrack_id]["final_entity"]["data"] = data
+            self.entities_dict[ftrack_id]["final_entity"]["type"] = (
+                "project"
+            )
+
+            proj_schema = entity["project_schema"]
+            task_types = proj_schema["_task_type_schema"]["types"]
+            proj_apps, warnings = get_project_apps(
+                data.pop("applications", [])
+            )
+            for msg, items in warnings.items():
+                if not msg or not items:
+                    continue
+                self.report_items["warning"][msg] = items
+
+            current_project_anatomy_data = get_anatomy_settings(
+                project_name, exclude_locals=True
+            )
+            anatomy_tasks = current_project_anatomy_data["tasks"]
+            tasks = {}
+            default_type_data = {
+                "short_name": ""
+            }
+            for task_type in task_types:
+                task_type_name = task_type["name"]
+                tasks[task_type_name] = copy.deepcopy(
+                    anatomy_tasks.get(task_type_name)
+                    or default_type_data
+                )
+
+            project_config = {
+                "tasks": tasks,
+                "apps": proj_apps
+            }
+            for key, value in current_project_anatomy_data.items():
+                if key in project_config or key == "attributes":
+                    continue
+                project_config[key] = value
+
+            self.entities_dict[ftrack_id]["final_entity"]["config"] = (
+                project_config
+            )

         if not_set_ids:
             self.log.debug((
@@ -1433,6 +1462,28 @@ class SyncEntitiesFactory:
             for child_id in entity_dict["children"]:
                 children_queue.append(child_id)

+    def set_input_links(self):
+        ftrack_ids = set(self.create_ftrack_ids) | set(self.update_ftrack_ids)
+
+        input_links_by_ftrack_id = self._get_input_links(ftrack_ids)
+
+        for ftrack_id in ftrack_ids:
+            input_links = []
+            final_entity = self.entities_dict[ftrack_id]["final_entity"]
+            final_entity["data"]["inputLinks"] = input_links
+            link_ids = input_links_by_ftrack_id[ftrack_id]
+            if not link_ids:
+                continue
+
+            for ftrack_link_id in link_ids:
+                mongo_id = self.ftrack_avalon_mapper.get(ftrack_link_id)
+                if mongo_id is not None:
+                    input_links.append({
+                        "_id": ObjectId(mongo_id),
+                        "linkedBy": "ftrack",
+                        "type": "breakdown"
+                    })
+
     def prepare_changes(self):
         self.log.debug("* Preparing changes for avalon/ftrack")
         hierarchy_changing_ids = []
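Note that set_input_links assigns an empty inputLinks list to every entity's data first and only then fills it, so unlinked entities still get the key. A condensed sketch of the per-entity mapping step (ids are illustrative):

from bson.objectid import ObjectId

ftrack_avalon_mapper = {"ft-b": "624f1e6e8c2f4b0a9c5d1234"}
input_links_by_ftrack_id = {"ft-a": {"ft-b", "ft-c"}}

input_links = []
for link_id in input_links_by_ftrack_id["ft-a"]:
    mongo_id = ftrack_avalon_mapper.get(link_id)
    if mongo_id is not None:
        # Only links known on the avalon side are kept; "ft-c" is
        # skipped here because it was never synchronized.
        input_links.append({
            "_id": ObjectId(mongo_id),
            "linkedBy": "ftrack",
            "type": "breakdown"
        })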
@@ -1806,9 +1857,28 @@ class SyncEntitiesFactory:
         for ftrack_id in self.create_ftrack_ids:
             # CHECK it is possible that entity was already created
             # because is parent of another entity which was processed first
-            if ftrack_id in self.ftrack_avalon_mapper:
-                continue
-            self.create_avalon_entity(ftrack_id)
+            if ftrack_id not in self.ftrack_avalon_mapper:
+                self.create_avalon_entity(ftrack_id)

+        self.set_input_links()
+
+        unarchive_writes = []
+        for item in self.unarchive_list:
+            mongo_id = item["_id"]
+            unarchive_writes.append(ReplaceOne(
+                {"_id": mongo_id},
+                item
+            ))
+            av_ent_path_items = item["data"]["parents"]
+            av_ent_path_items.append(item["name"])
+            av_ent_path = "/".join(av_ent_path_items)
+            self.log.debug(
+                "Entity was unarchived <{}>".format(av_ent_path)
+            )
+            self.remove_from_archived(mongo_id)
+
+        if unarchive_writes:
+            self.dbcon.bulk_write(unarchive_writes)
+
         if len(self.create_list) > 0:
             self.dbcon.insert_many(self.create_list)
@@ -1899,14 +1969,8 @@ class SyncEntitiesFactory:

         if unarchive is False:
             self.create_list.append(item)
-            return
-        # If unarchive then replace entity data in database
-        self.dbcon.replace_one({"_id": new_id}, item)
-        self.remove_from_archived(mongo_id)
-        av_ent_path_items = item["data"]["parents"]
-        av_ent_path_items.append(item["name"])
-        av_ent_path = "/".join(av_ent_path_items)
-        self.log.debug("Entity was unarchived <{}>".format(av_ent_path))
+        else:
+            self.unarchive_list.append(item)

     def check_unarchivation(self, ftrack_id, mongo_id, name):
         archived_by_id = self.avalon_archived_by_id.get(mongo_id)
@@ -1,208 +1,266 @@
-import pyblish.api
-import json
 import os
+import json
+import copy
+import pyblish.api


 class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
-    """Collect ftrack component data
+    """Collect ftrack component data (not integrate yet).

     Add ftrack component list to instance.


     """

     order = pyblish.api.IntegratorOrder + 0.48
-    label = 'Integrate Ftrack Component'
+    label = "Integrate Ftrack Component"
     families = ["ftrack"]

-    family_mapping = {'camera': 'cam',
-                      'look': 'look',
-                      'mayaascii': 'scene',
-                      'model': 'geo',
-                      'rig': 'rig',
-                      'setdress': 'setdress',
-                      'pointcache': 'cache',
-                      'render': 'render',
-                      'render2d': 'render',
-                      'nukescript': 'comp',
-                      'write': 'render',
-                      'review': 'mov',
-                      'plate': 'img',
-                      'audio': 'audio',
-                      'workfile': 'scene',
-                      'animation': 'cache',
-                      'image': 'img',
-                      'reference': 'reference'
-                      }
+    family_mapping = {
+        "camera": "cam",
+        "look": "look",
+        "mayaascii": "scene",
+        "model": "geo",
+        "rig": "rig",
+        "setdress": "setdress",
+        "pointcache": "cache",
+        "render": "render",
+        "render2d": "render",
+        "nukescript": "comp",
+        "write": "render",
+        "review": "mov",
+        "plate": "img",
+        "audio": "audio",
+        "workfile": "scene",
+        "animation": "cache",
+        "image": "img",
+        "reference": "reference"
+    }

     def process(self, instance):
-        self.ftrack_locations = {}
-        self.log.debug('instance {}'.format(instance))
+        self.log.debug("instance {}".format(instance))

-        if instance.data.get('version'):
-            version_number = int(instance.data.get('version'))
-        else:
+        instance_version = instance.data.get("version")
+        if instance_version is None:
             raise ValueError("Instance version not set")

-        family = instance.data['family'].lower()
+        version_number = int(instance_version)
+
+        family = instance.data["family"]
+        family_low = instance.data["family"].lower()

         asset_type = instance.data.get("ftrackFamily")
-        if not asset_type and family in self.family_mapping:
-            asset_type = self.family_mapping[family]
+        if not asset_type and family_low in self.family_mapping:
+            asset_type = self.family_mapping[family_low]

+        # Ignore this instance if neither "ftrackFamily" nor a family mapping
+        # is found.
         if not asset_type:
+            self.log.info((
+                "Family \"{}\" does not match any asset type mapping"
+            ).format(family))
             return

-        componentList = []
+        instance_repres = instance.data.get("representations")
+        if not instance_repres:
+            self.log.info((
+                "Skipping instance. Does not have any representations {}"
+            ).format(str(instance)))
+            return
+
+        # Prepare FPS
+        instance_fps = instance.data.get("fps")
+        if instance_fps is None:
+            instance_fps = instance.context.data["fps"]
+
+        # Base of component item data
+        # - create a copy of this object when want to use it
+        base_component_item = {
+            "assettype_data": {
+                "short": asset_type,
+            },
+            "asset_data": {
+                "name": instance.data["subset"],
+            },
+            "assetversion_data": {
+                "version": version_number,
+                "comment": instance.context.data.get("comment") or ""
+            },
+            "component_overwrite": False,
+            # This can be changed optionally
+            "thumbnail": False,
+            # These must be changed for each component
+            "component_data": None,
+            "component_path": None,
+            "component_location": None
+        }

         ft_session = instance.context.data["ftrackSession"]

-        for comp in instance.data['representations']:
-            self.log.debug('component {}'.format(comp))
+        # Filter types of representations
+        review_representations = []
+        thumbnail_representations = []
+        other_representations = []
+        for repre in instance_repres:
+            self.log.debug("Representation {}".format(repre))
+            repre_tags = repre.get("tags") or []
+            if repre.get("thumbnail") or "thumbnail" in repre_tags:
+                thumbnail_representations.append(repre)

-            if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])):
-                location = self.get_ftrack_location(
-                    'ftrack.server', ft_session
-                )
-                component_data = {
-                    "name": "thumbnail"  # Default component name is "main".
-                }
-                comp['thumbnail'] = True
-                comp_files = comp["files"]
+            elif "ftrackreview" in repre_tags:
+                review_representations.append(repre)

+            else:
+                other_representations.append(repre)
+
+        # Prepare ftrack locations
+        unmanaged_location = ft_session.query(
+            "Location where name is \"ftrack.unmanaged\""
+        ).one()
+        ftrack_server_location = ft_session.query(
+            "Location where name is \"ftrack.server\""
+        ).one()
+
+        # Components data
+        component_list = []
+        # Components that will be duplicated to unmanaged location
+        src_components_to_add = []
+
+        # Create thumbnail components
+        # TODO what if there is multiple thumbnails?
+        first_thumbnail_component = None
+        for repre in thumbnail_representations:
+            published_path = repre.get("published_path")
+            if not published_path:
+                comp_files = repre["files"]
                 if isinstance(comp_files, (tuple, list, set)):
                     filename = comp_files[0]
                 else:
                     filename = comp_files

-                comp['published_path'] = os.path.join(
-                    comp['stagingDir'], filename
-                )
-
-            elif comp.get('ftrackreview') or ("ftrackreview" in comp.get('tags', [])):
-                '''
-                Ftrack bug requirement:
-                - Start frame must be 0
-                - End frame must be {duration}
-                EXAMPLE: When mov has 55 frames:
-                - Start frame should be 0
-                - End frame should be 55 (do not ask why please!)
-                '''
-                start_frame = 0
-                end_frame = 1
-                if 'frameEndFtrack' in comp and 'frameStartFtrack' in comp:
-                    end_frame += (
-                        comp['frameEndFtrack'] - comp['frameStartFtrack']
-                    )
-                else:
-                    end_frame += (
-                        instance.data["frameEnd"] - instance.data["frameStart"]
-                    )
-
-                fps = comp.get('fps')
-                if fps is None:
-                    fps = instance.data.get(
-                        "fps", instance.context.data['fps']
-                    )
-
-                comp['fps'] = fps
-
-                location = self.get_ftrack_location(
-                    'ftrack.server', ft_session
+                published_path = os.path.join(
+                    repre["stagingDir"], filename
                 )
-                component_data = {
-                    # Default component name is "main".
-                    "name": "ftrackreview-mp4",
-                    "metadata": {'ftr_meta': json.dumps({
-                        'frameIn': int(start_frame),
-                        'frameOut': int(end_frame),
-                        'frameRate': float(comp['fps'])})}
-                }
-                comp['thumbnail'] = False
-            else:
-                component_data = {
-                    "name": comp['name']
-                }
-                location = self.get_ftrack_location(
-                    'ftrack.unmanaged', ft_session
-                )
-                comp['thumbnail'] = False
+                if not os.path.exists(published_path):
+                    continue
+                repre["published_path"] = published_path

-            self.log.debug('location {}'.format(location))
-
-            component_item = {
-                "assettype_data": {
-                    "short": asset_type,
-                },
-                "asset_data": {
-                    "name": instance.data["subset"],
-                },
-                "assetversion_data": {
-                    "version": version_number,
-                    "comment": instance.context.data.get("comment", "")
-                },
-                "component_data": component_data,
-                "component_path": comp['published_path'],
-                'component_location': location,
-                "component_overwrite": False,
-                "thumbnail": comp['thumbnail']
-            }
+            # Create copy of base comp item and append it
+            thumbnail_item = copy.deepcopy(base_component_item)
+            thumbnail_item["component_path"] = repre["published_path"]
+            thumbnail_item["component_data"] = {
+                "name": "thumbnail"
+            }
+            thumbnail_item["thumbnail"] = True
+            # Create copy of item before setting location
+            src_components_to_add.append(copy.deepcopy(thumbnail_item))
+            # Create copy of first thumbnail
+            if first_thumbnail_component is None:
+                first_thumbnail_component = copy.deepcopy(thumbnail_item)
+            # Set location
+            thumbnail_item["component_location"] = ftrack_server_location
+            # Add item to component list
+            component_list.append(thumbnail_item)

-            # Add custom attributes for AssetVersion
-            assetversion_cust_attrs = {}
-            intent_val = instance.context.data.get("intent")
-            if intent_val and isinstance(intent_val, dict):
-                intent_val = intent_val.get("value")
+        # Create review components
+        # Change asset name of each new component for review
+        is_first_review_repre = True
+        not_first_components = []
+        for repre in review_representations:
+            frame_start = repre.get("frameStartFtrack")
+            frame_end = repre.get("frameEndFtrack")
+            if frame_start is None or frame_end is None:
+                frame_start = instance.data["frameStart"]
+                frame_end = instance.data["frameEnd"]

-            if intent_val:
-                assetversion_cust_attrs["intent"] = intent_val
+            # Frame end of uploaded video file should be duration in frames
+            # - frame start is always 0
+            # - frame end is duration in frames
+            duration = frame_end - frame_start + 1

-            component_item["assetversion_data"]["custom_attributes"] = (
-                assetversion_cust_attrs
-            )
+            fps = repre.get("fps")
+            if fps is None:
+                fps = instance_fps

-            componentList.append(component_item)
-            # Create copy with ftrack.unmanaged location if thumb or prev
-            if comp.get('thumbnail') or comp.get('preview') \
-                    or ("preview" in comp.get('tags', [])) \
-                    or ("review" in comp.get('tags', [])) \
-                    or ("thumbnail" in comp.get('tags', [])):
-                unmanaged_loc = self.get_ftrack_location(
-                    'ftrack.unmanaged', ft_session
-                )
-
-                component_data_src = component_data.copy()
-                name = component_data['name'] + '_src'
-                component_data_src['name'] = name
-
-                component_item_src = {
-                    "assettype_data": {
-                        "short": asset_type,
-                    },
-                    "asset_data": {
-                        "name": instance.data["subset"],
-                    },
-                    "assetversion_data": {
-                        "version": version_number,
-                    },
-                    "component_data": component_data_src,
-                    "component_path": comp['published_path'],
-                    'component_location': unmanaged_loc,
-                    "component_overwrite": False,
-                    "thumbnail": False
-                }
+            # Create copy of base comp item and append it
+            review_item = copy.deepcopy(base_component_item)
+            # Change location
+            review_item["component_path"] = repre["published_path"]
+            # Change component data
+            review_item["component_data"] = {
+                # Default component name is "main".
+                "name": "ftrackreview-mp4",
+                "metadata": {
+                    "ftr_meta": json.dumps({
+                        "frameIn": 0,
+                        "frameOut": int(duration),
+                        "frameRate": float(fps)
+                    })
+                }
+            }
+            # Create copy of item before setting location or changing asset
+            src_components_to_add.append(copy.deepcopy(review_item))
+            if is_first_review_repre:
+                is_first_review_repre = False
+            else:
+                # Add representation name to asset name of "not first" review
+                asset_name = review_item["asset_data"]["name"]
+                review_item["asset_data"]["name"] = "_".join(
+                    (asset_name, repre["name"])
+                )
+                not_first_components.append(review_item)

-                componentList.append(component_item_src)
+            # Set location
+            review_item["component_location"] = ftrack_server_location
+            # Add item to component list
+            component_list.append(review_item)

-        self.log.debug('componentsList: {}'.format(str(componentList)))
-        instance.data["ftrackComponentsList"] = componentList
+        # Duplicate thumbnail component for all not first reviews
+        if first_thumbnail_component is not None:
+            for component_item in not_first_components:
+                asset_name = component_item["asset_data"]["name"]
+                new_thumbnail_component = copy.deepcopy(
+                    first_thumbnail_component
+                )
+                new_thumbnail_component["asset_data"]["name"] = asset_name
+                new_thumbnail_component["component_location"] = (
+                    ftrack_server_location
+                )
+                component_list.append(new_thumbnail_component)

-    def get_ftrack_location(self, name, session):
-        if name in self.ftrack_locations:
-            return self.ftrack_locations[name]
+        # Add source components for review and thumbnail components
+        for copy_src_item in src_components_to_add:
+            # Make sure thumbnail is disabled
+            copy_src_item["thumbnail"] = False
+            # Set location
+            copy_src_item["component_location"] = unmanaged_location
+            # Modify name of component to have suffix "_src"
+            component_data = copy_src_item["component_data"]
+            component_name = component_data["name"]
+            component_data["name"] = component_name + "_src"
+            component_list.append(copy_src_item)

-        location = session.query(
-            'Location where name is "{}"'.format(name)
-        ).one()
-        self.ftrack_locations[name] = location
-        return location
+        # Add other representations as components
+        for repre in other_representations:
+            published_path = repre.get("published_path")
+            if not published_path:
+                continue
+            # Create copy of base comp item and append it
+            other_item = copy.deepcopy(base_component_item)
+            other_item["component_data"] = {
+                "name": repre["name"]
+            }
+            other_item["component_location"] = unmanaged_location
+            other_item["component_path"] = published_path
+            component_list.append(other_item)
+
+        def json_obj_parser(obj):
+            return str(obj)
+
+        self.log.debug("Components list: {}".format(
+            json.dumps(
+                component_list,
+                sort_keys=True,
+                indent=4,
+                default=json_obj_parser
+            )
+        ))
+        instance.data["ftrackComponentsList"] = component_list
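The ftrack web player reads the review range from the JSON-encoded ftr_meta string, with frameIn pinned to 0 and frameOut set to the clip duration, as the removed comment block explains. A sketch with illustrative numbers:

import json

frame_start, frame_end, fps = 1001, 1055, 25.0
duration = frame_end - frame_start + 1    # 55 frames

component_data = {
    "name": "ftrackreview-mp4",
    "metadata": {
        "ftr_meta": json.dumps({
            "frameIn": 0,
            "frameOut": int(duration),
            "frameRate": float(fps)
        })
    }
}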
@@ -148,12 +148,27 @@ class OpenPypeContextSelector:
         for k, v in env.items():
             print(" {}: {}".format(k, v))

+        publishing_paths = [os.path.join(self.job.imageDir,
+                                         os.path.dirname(
+                                             self.job.imageFileName))]
+
+        # add additional channels
+        channel_idx = 0
+        channel = self.job.channelFileName(channel_idx)
+        while channel:
+            channel_path = os.path.dirname(
+                os.path.join(self.job.imageDir, channel))
+            if channel_path not in publishing_paths:
+                publishing_paths.append(channel_path)
+            channel_idx += 1
+            channel = self.job.channelFileName(channel_idx)
+
         args = [os.path.join(self.openpype_root, self.openpype_executable),
-                'publish', '-t', "rr_control", "--gui",
-                os.path.join(self.job.imageDir,
-                             os.path.dirname(self.job.imageFileName))
+                'publish', '-t', "rr_control", "--gui"
                 ]

+        args += publishing_paths
+
         print(">>> running {}".format(" ".join(args)))
         orig = os.environ.copy()
         orig.update(env)
@@ -421,7 +421,8 @@ class SFTPHandler(AbstractProvider):

         try:
             return pysftp.Connection(**conn_params)
-        except paramiko.ssh_exception.SSHException:
+        except (paramiko.ssh_exception.SSHException,
+                pysftp.exceptions.ConnectionException):
             log.warning("Couldn't connect", exc_info=True)

     def _mark_progress(self, collection, file, representation, server, site,
@@ -1574,6 +1574,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Use 'force' to remove existing or raises ValueError
         """
+        reseted_existing = False
         for repre_file in representation.pop().get("files"):
             if file_id and file_id != repre_file["_id"]:
                 continue
@@ -1584,12 +1585,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                 self._reset_site_for_file(collection, query,
                                           elem, repre_file["_id"],
                                           site_name)
-                return
+                reseted_existing = True
             else:
                 msg = "Site {} already present".format(site_name)
                 log.info(msg)
                 raise ValueError(msg)

+        if reseted_existing:
+            return
+
         if not file_id:
             update = {
                 "$push": {"files.$[].sites": elem}
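The "files.$[].sites" update uses MongoDB's all-positional operator: "$[]" targets every element of the files array, appending the site element to each file's sites list. A standalone sketch (connection and names are placeholders):

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")
collection = client["avalon"]["my_project"]

elem = {"name": "studio", "created_dt": None}
collection.update_one(
    {"type": "representation"},
    # "$[]" matches every element of "files", so the site element is
    # pushed onto the "sites" array of each file entry.
    {"$push": {"files.$[].sites": elem}}
)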
@@ -785,7 +785,7 @@ class InstanceListView(AbstractInstanceView):
             group_index = self._instance_model.index(
                 group_item.row(), group_item.column()
             )
-            proxy_index = self.mapFromSource(group_index)
+            proxy_index = self._proxy_model.mapFromSource(group_index)
             self._instance_view.setExpanded(proxy_index, expanded)

     def _on_group_toggle_request(self, group_name, state):
@@ -810,6 +810,6 @@ class InstanceListView(AbstractInstanceView):
         self._change_active_instances(instance_ids, active)

-        proxy_index = self.mapFromSource(group_item.index())
+        proxy_index = self._proxy_model.mapFromSource(group_item.index())
         if not self._instance_view.isExpanded(proxy_index):
             self._instance_view.expand(proxy_index)
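Both fixes route the index conversion through the proxy model explicitly: a view backed by a QSortFilterProxyModel needs proxy indexes, not source-model indexes. A sketch of the relationship, assuming the Qt.py shim this codebase builds on:

from Qt import QtCore, QtGui

source_model = QtGui.QStandardItemModel()
proxy_model = QtCore.QSortFilterProxyModel()
proxy_model.setSourceModel(source_model)

group_item = QtGui.QStandardItem("group")
source_model.appendRow(group_item)

source_index = source_model.index(group_item.row(), group_item.column())
# Only the proxy knows where this row lands after sorting/filtering.
proxy_index = proxy_model.mapFromSource(source_index)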