Merge branch 'develop' into feature/OP-3610_flame-shot-frame-range-un-retimed-switch

This commit is contained in:
Jakub Jezek 2022-09-09 14:07:55 +02:00
commit 96c6a433f0
No known key found for this signature in database
GPG key ID: 730D7C02726179A7
32 changed files with 517 additions and 430 deletions

View file

@ -1,8 +1,6 @@
import os
from openpype.lib import (
PreLaunchHook,
create_workdir_extra_folders
)
from openpype.lib import PreLaunchHook
from openpype.pipeline.workfile import create_workdir_extra_folders
class AddLastWorkfileToLaunchArgs(PreLaunchHook):

View file

@ -1,113 +0,0 @@
import os
import collections
from pprint import pformat
import pyblish.api
from openpype.client import (
get_subsets,
get_last_versions,
get_representations
)
from openpype.pipeline import legacy_io
class AppendCelactionAudio(pyblish.api.ContextPlugin):
    """Collect an audio file for CelAction publishing.

    Looks up the latest version of the ``audioMain`` subset on the
    context asset and, when its representation file exists on disk,
    stores the path to ``context.data["audioFile"]``.
    """

    label = "Collect Audio for publishing"
    order = pyblish.api.CollectorOrder + 0.1

    def process(self, context):
        self.log.info('Collecting Audio Data')

        asset_doc = context.data["assetEntity"]

        # get all available representations
        subsets = self.get_subsets(
            asset_doc,
            representations=["audio", "wav"]
        )
        self.log.info(f"subsets is: {pformat(subsets)}")

        if not subsets.get("audioMain"):
            raise AttributeError("`audioMain` subset does not exist")

        reprs = subsets.get("audioMain", {}).get("representations", [])
        self.log.info(f"reprs is: {pformat(reprs)}")

        repre = next(iter(reprs), None)
        if not repre:
            # BUGFIX: original code raised a string literal which is a
            # TypeError in Python 3 - raise a proper exception instead.
            raise ValueError("Missing `audioMain` representation")
        self.log.info(f"representation is: {repre}")

        audio_file = repre.get('data', {}).get('path', "")
        if os.path.exists(audio_file):
            context.data["audioFile"] = audio_file
            self.log.info(
                'audio_file: {}, has been added to context'.format(audio_file))
        else:
            self.log.warning("Couldn't find any audio file on Ftrack.")

    def get_subsets(self, asset_doc, representations):
        """Query subsets with last version and filtered representations.

        Arguments:
            asset_doc (dict): Asset (shot) mongo document.
            representations (list): Representation names to filter by.

        Returns:
            dict: Subset name -> {"version": version_doc,
                "representations": [repre_doc, ...]}.
        """

        # Query all subsets for asset
        project_name = legacy_io.active_project()
        # BUGFIX: also query subset "name" - it is used as output key.
        # The original queried only "_id" and later read "name" from a
        # version document, which does not contain that field.
        subset_docs = get_subsets(
            project_name, asset_ids=[asset_doc["_id"]], fields=["_id", "name"]
        )
        subset_docs_by_id = {
            subset_doc["_id"]: subset_doc
            for subset_doc in subset_docs
        }

        # Check if we found anything (explicit raise instead of 'assert'
        # which is stripped under 'python -O')
        if not subset_docs_by_id:
            raise ValueError((
                "No subsets found. Check correct filter. "
                "Try this for start `r'.*'`: asset: `{}`"
            ).format(asset_doc["name"]))

        last_versions_by_subset_id = get_last_versions(
            project_name, subset_docs_by_id.keys(), fields=["_id", "parent"]
        )

        version_docs_by_id = {
            version_doc["_id"]: version_doc
            for version_doc in last_versions_by_subset_id.values()
        }

        repre_docs = get_representations(
            project_name,
            version_ids=version_docs_by_id.keys(),
            representation_names=representations
        )
        repre_docs_by_version_id = collections.defaultdict(list)
        for repre_doc in repre_docs:
            version_id = repre_doc["parent"]
            repre_docs_by_version_id[version_id].append(repre_doc)

        output_dict = {}
        for version_id, version_repres in repre_docs_by_version_id.items():
            version_doc = version_docs_by_id[version_id]
            subset_doc = subset_docs_by_id[version_doc["parent"]]
            # Store queried docs by subset name
            output_dict[subset_doc["name"]] = {
                "representations": version_repres,
                "version": version_doc
            }
        return output_dict

View file

@ -318,10 +318,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
@staticmethod
def create_otio_time_range_from_timeline_item_data(track_item):
speed = track_item.playbackSpeed()
timeline = phiero.get_current_sequence()
frame_start = int(track_item.timelineIn())
frame_duration = int((track_item.duration() - 1) / speed)
frame_duration = int(track_item.duration())
fps = timeline.framerate().toFloat()
return hiero_export.create_otio_time_range(

View file

@ -70,7 +70,7 @@ class CollectAssembly(pyblish.api.InstancePlugin):
data[representation_id].append(instance_data)
instance.data["scenedata"] = dict(data)
instance.data["hierarchy"] = list(set(hierarchy_nodes))
instance.data["nodesHierarchy"] = list(set(hierarchy_nodes))
def get_file_rule(self, rule):
return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))

View file

@ -33,7 +33,7 @@ class ExtractAssembly(openpype.api.Extractor):
json.dump(instance.data["scenedata"], filepath, ensure_ascii=False)
self.log.info("Extracting point cache ..")
cmds.select(instance.data["hierarchy"])
cmds.select(instance.data["nodesHierarchy"])
# Run basic alembic exporter
extract_alembic(file=hierarchy_path,

View file

@ -48,7 +48,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin):
from openpype.hosts.maya.api import lib
# Get all transforms in the loaded containers
container_roots = cmds.listRelatives(instance.data["hierarchy"],
container_roots = cmds.listRelatives(instance.data["nodesHierarchy"],
children=True,
type="transform",
fullPath=True)

View file

@ -201,34 +201,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
if not instance.data["review"]:
instance.data["useSequenceForReview"] = False
project_name = legacy_io.active_project()
asset_name = instance.data["asset"]
# * Add audio to instance if exists.
# Find latest versions document
last_version_doc = get_last_version_by_subset_name(
project_name, "audioMain", asset_name=asset_name, fields=["_id"]
)
repre_doc = None
if last_version_doc:
# Try to find it's representation (Expected there is only one)
repre_docs = list(get_representations(
project_name, version_ids=[last_version_doc["_id"]]
))
if not repre_docs:
self.log.warning(
"Version document does not contain any representations"
)
else:
repre_doc = repre_docs[0]
# Add audio to instance if representation was found
if repre_doc:
instance.data["audio"] = [{
"offset": 0,
"filename": get_representation_path(repre_doc)
}]
self.log.debug("instance.data: {}".format(pformat(instance.data)))
def is_prerender(self, families):

View file

@ -1,21 +1,60 @@
import os
import re
import abc
import json
import logging
import six
import platform
import functools
import warnings
import clique
from openpype.client import get_project
from openpype.settings import get_project_settings
from .profiles_filtering import filter_profiles
log = logging.getLogger(__name__)
class PathToolsDeprecatedWarning(DeprecationWarning):
pass
def deprecated(new_destination):
"""Mark functions as deprecated.
It will result in a warning being emitted when the function is used.
"""
func = None
if callable(new_destination):
func = new_destination
new_destination = None
def _decorator(decorated_func):
if new_destination is None:
warning_message = (
" Please check content of deprecated function to figure out"
" possible replacement."
)
else:
warning_message = " Please replace your usage with '{}'.".format(
new_destination
)
@functools.wraps(decorated_func)
def wrapper(*args, **kwargs):
warnings.simplefilter("always", PathToolsDeprecatedWarning)
warnings.warn(
(
"Call to deprecated function '{}'"
"\nFunction was moved or removed.{}"
).format(decorated_func.__name__, warning_message),
category=PathToolsDeprecatedWarning,
stacklevel=4
)
return decorated_func(*args, **kwargs)
return wrapper
if func is None:
return _decorator
return _decorator(func)
def format_file_size(file_size, suffix=None):
"""Returns formatted string with size in appropriate unit.
@ -232,107 +271,69 @@ def get_last_version_from_path(path_dir, filter):
return None
@deprecated("openpype.pipeline.project_folders.concatenate_splitted_paths")
def concatenate_splitted_paths(split_paths, anatomy):
pattern_array = re.compile(r"\[.*\]")
output = []
for path_items in split_paths:
clean_items = []
if isinstance(path_items, str):
path_items = [path_items]
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
for path_item in path_items:
if not re.match(r"{.+}", path_item):
path_item = re.sub(pattern_array, "", path_item)
clean_items.append(path_item)
from openpype.pipeline.project_folders import concatenate_splitted_paths
# backward compatibility
if "__project_root__" in path_items:
for root, root_path in anatomy.roots.items():
if not os.path.exists(str(root_path)):
log.debug("Root {} path path {} not exist on \
computer!".format(root, root_path))
continue
clean_items = ["{{root[{}]}}".format(root),
r"{project[name]}"] + clean_items[1:]
output.append(os.path.normpath(os.path.sep.join(clean_items)))
continue
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
return concatenate_splitted_paths(split_paths, anatomy)
@deprecated
def get_format_data(anatomy):
project_doc = get_project(anatomy.project_name, fields=["data.code"])
project_code = project_doc["data"]["code"]
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
return {
"root": anatomy.roots,
"project": {
"name": anatomy.project_name,
"code": project_code
},
}
from openpype.pipeline.template_data import get_project_template_data
data = get_project_template_data(project_name=anatomy.project_name)
data["root"] = anatomy.roots
return data
@deprecated("openpype.pipeline.project_folders.fill_paths")
def fill_paths(path_list, anatomy):
format_data = get_format_data(anatomy)
filled_paths = []
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
for path in path_list:
new_path = path.format(**format_data)
filled_paths.append(new_path)
from openpype.pipeline.project_folders import fill_paths
return filled_paths
return fill_paths(path_list, anatomy)
@deprecated("openpype.pipeline.project_folders.create_project_folders")
def create_project_folders(basic_paths, project_name):
from openpype.pipeline import Anatomy
anatomy = Anatomy(project_name)
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
concat_paths = concatenate_splitted_paths(basic_paths, anatomy)
filled_paths = fill_paths(concat_paths, anatomy)
from openpype.pipeline.project_folders import create_project_folders
# Create folders
for path in filled_paths:
if os.path.exists(path):
log.debug("Folder already exists: {}".format(path))
else:
log.debug("Creating folder: {}".format(path))
os.makedirs(path)
def _list_path_items(folder_structure):
output = []
for key, value in folder_structure.items():
if not value:
output.append(key)
else:
paths = _list_path_items(value)
for path in paths:
if not isinstance(path, (list, tuple)):
path = [path]
item = [key]
item.extend(path)
output.append(item)
return output
return create_project_folders(project_name, basic_paths)
@deprecated("openpype.pipeline.project_folders.get_project_basic_paths")
def get_project_basic_paths(project_name):
project_settings = get_project_settings(project_name)
folder_structure = (
project_settings["global"]["project_folder_structure"]
)
if not folder_structure:
return []
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
if isinstance(folder_structure, str):
folder_structure = json.loads(folder_structure)
return _list_path_items(folder_structure)
from openpype.pipeline.project_folders import get_project_basic_paths
return get_project_basic_paths(project_name)
@deprecated("openpype.pipeline.workfile.create_workdir_extra_folders")
def create_workdir_extra_folders(
workdir, host_name, task_type, task_name, project_name,
project_settings=None
@ -349,37 +350,18 @@ def create_workdir_extra_folders(
project_name (str): Name of project on which task is.
project_settings (dict): Prepared project settings. Are loaded if not
passed.
Deprecated:
Function will be removed after release version 3.16.*
"""
# Load project settings if not set
if not project_settings:
project_settings = get_project_settings(project_name)
# Load extra folders profiles
extra_folders_profiles = (
project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
from openpype.pipeline.project_folders import create_workdir_extra_folders
return create_workdir_extra_folders(
workdir,
host_name,
task_type,
task_name,
project_name,
project_settings
)
# Skip if are empty
if not extra_folders_profiles:
return
# Prepare profiles filters
filter_data = {
"task_types": task_type,
"task_names": task_name,
"hosts": host_name
}
profile = filter_profiles(extra_folders_profiles, filter_data)
if profile is None:
return
for subfolder in profile["folders"]:
# Make sure backslashes are converted to forwards slashes
# and does not start with slash
subfolder = subfolder.replace("\\", "/").lstrip("/")
# Skip empty strings
if not subfolder:
continue
fullpath = os.path.join(workdir, subfolder)
if not os.path.exists(fullpath):
os.makedirs(fullpath)

View file

@ -3,7 +3,6 @@
import os
import logging
import re
import json
import warnings
import functools

View file

@ -1,7 +1,10 @@
import re
from openpype.pipeline.project_folders import (
get_project_basic_paths,
create_project_folders,
)
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import get_project_basic_paths, create_project_folders
class CreateProjectFolders(BaseAction):
@ -81,7 +84,7 @@ class CreateProjectFolders(BaseAction):
}
# Invoking OpenPype API to create the project folders
create_project_folders(basic_paths, project_name)
create_project_folders(project_name, basic_paths)
self.create_ftrack_entities(basic_paths, project_entity)
self.trigger_event(

View file

@ -1,5 +1,8 @@
"""Loads publishing context from json and continues in publish process.
Should run before 'CollectAnatomyContextData' so the user on context is
changed before it's stored to context anatomy data or instance anatomy data.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
@ -13,7 +16,7 @@ import os
import pyblish.api
class CollectUsername(pyblish.api.ContextPlugin):
class CollectUsernameForWebpublish(pyblish.api.ContextPlugin):
"""
Translates user email to Ftrack username.
@ -32,10 +35,8 @@ class CollectUsername(pyblish.api.ContextPlugin):
hosts = ["webpublisher", "photoshop"]
targets = ["remotepublish", "filespublish", "tvpaint_worker"]
_context = None
def process(self, context):
self.log.info("CollectUsername")
self.log.info("{}".format(self.__class__.__name__))
os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"]
os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"]
@ -54,12 +55,14 @@ class CollectUsername(pyblish.api.ContextPlugin):
return
session = ftrack_api.Session(auto_connect_event_hub=False)
user = session.query("User where email like '{}'".format(user_email))
user = session.query(
"User where email like '{}'".format(user_email)
).first()
if not user:
raise ValueError(
"Couldn't find user with {} email".format(user_email))
user = user[0]
username = user.get("username")
self.log.debug("Resolved ftrack username:: {}".format(username))
os.environ["FTRACK_API_USER"] = username
@ -67,5 +70,4 @@ class CollectUsername(pyblish.api.ContextPlugin):
burnin_name = username
if '@' in burnin_name:
burnin_name = burnin_name[:burnin_name.index('@')]
os.environ["WEBPUBLISH_OPENPYPE_USERNAME"] = burnin_name
context.data["user"] = burnin_name

View file

@ -166,50 +166,21 @@ def update_op_assets(
# Substitute item type for general classification (assets or shots)
if item_type in ["Asset", "AssetType"]:
substitute_item_type = "assets"
entity_root_asset_name = "Assets"
elif item_type in ["Episode", "Sequence"]:
substitute_item_type = "shots"
else:
substitute_item_type = f"{item_type.lower()}s"
entity_parent_folders = [
f
for f in project_module_settings["entities_root"]
.get(substitute_item_type)
.split("/")
if f
]
entity_root_asset_name = "Shots"
# Root parent folder if exist
visual_parent_doc_id = (
asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
)
if visual_parent_doc_id is None:
# Find root folder docs
root_folder_docs = get_assets(
# Find root folder doc ("Assets" or "Shots")
root_folder_doc = get_asset_by_name(
project_name,
asset_names=[entity_parent_folders[-1]],
asset_name=entity_root_asset_name,
fields=["_id", "data.root_of"],
)
# NOTE: Not sure why it's checking for entity type?
# OP3 does not support multiple assets with same names so type
# filtering is irrelevant.
# This way mimics previous implementation:
# ```
# root_folder_doc = dbcon.find_one(
# {
# "type": "asset",
# "name": entity_parent_folders[-1],
# "data.root_of": substitute_item_type,
# },
# ["_id"],
# )
# ```
root_folder_doc = None
for folder_doc in root_folder_docs:
root_of = folder_doc.get("data", {}).get("root_of")
if root_of == substitute_item_type:
root_folder_doc = folder_doc
break
if root_folder_doc:
visual_parent_doc_id = root_folder_doc["_id"]
@ -240,7 +211,7 @@ def update_op_assets(
item_name = item["name"]
# Set root folders parents
item_data["parents"] = entity_parent_folders + item_data["parents"]
item_data["parents"] = [entity_root_asset_name] + item_data["parents"]
# Update 'data' different in zou DB
updated_data = {
@ -318,13 +289,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
)
def sync_all_projects(login: str, password: str):
def sync_all_projects(login: str, password: str, ignore_projects: list = None):
"""Update all OP projects in DB with Zou data.
Args:
login (str): Kitsu user login
password (str): Kitsu user password
ignore_projects (list): List of unsynced project names
Raises:
gazu.exception.AuthFailedException: Wrong user login and/or password
"""
@ -340,6 +311,8 @@ def sync_all_projects(login: str, password: str):
dbcon.install()
all_projects = gazu.project.all_open_projects()
for project in all_projects:
if ignore_projects and project["name"] in ignore_projects:
continue
sync_project_from_kitsu(dbcon, project)
@ -396,54 +369,30 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
zou_ids_and_asset_docs[project["id"]] = project_doc
# Create entities root folders
project_module_settings = get_project_settings(project_name)["kitsu"]
for entity_type, root in project_module_settings["entities_root"].items():
parent_folders = root.split("/")
direct_parent_doc = None
for i, folder in enumerate(parent_folders, 1):
parent_doc = get_asset_by_name(
project_name, folder, fields=["_id", "data.root_of"]
)
# NOTE: Not sure why it's checking for entity type?
# OP3 does not support multiple assets with same names so type
# filtering is irrelevant.
# Also all of the entities could be queried at once using
# 'get_assets'.
# This way mimics previous implementation:
# ```
# parent_doc = dbcon.find_one(
# {"type": "asset", "name": folder, "data.root_of": entity_type}
# )
# ```
if (
parent_doc
and parent_doc.get("data", {}).get("root_of") != entity_type
):
parent_doc = None
if not parent_doc:
direct_parent_doc = dbcon.insert_one(
{
"name": folder,
"type": "asset",
"schema": "openpype:asset-3.0",
"data": {
"root_of": entity_type,
"parents": parent_folders[:i],
"visualParent": direct_parent_doc.inserted_id
if direct_parent_doc
else None,
"tasks": {},
},
}
)
to_insert = [
{
"name": r,
"type": "asset",
"schema": "openpype:asset-3.0",
"data": {
"root_of": r,
"tasks": {},
},
}
for r in ["Assets", "Shots"]
if not get_asset_by_name(
project_name, r, fields=["_id", "data.root_of"]
)
]
# Create
to_insert = [
create_op_asset(item)
for item in all_entities
if item["id"] not in zou_ids_and_asset_docs.keys()
]
to_insert.extend(
[
create_op_asset(item)
for item in all_entities
if item["id"] not in zou_ids_and_asset_docs.keys()
]
)
if to_insert:
# Insert doc in DB
dbcon.insert_many(to_insert)

View file

@ -95,13 +95,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
Reviews might be large, so allow only adding link to message instead of
uploading only.
"""
fill_data = copy.deepcopy(instance.context.data["anatomyData"])
username = fill_data.get("user")
fill_pairs = [
("asset", instance.data.get("asset", fill_data.get("asset"))),
("subset", instance.data.get("subset", fill_data.get("subset"))),
("username", instance.data.get("username",
fill_data.get("username"))),
("user", username),
("username", username),
("app", instance.data.get("app", fill_data.get("app"))),
("family", instance.data.get("family", fill_data.get("family"))),
("version", str(instance.data.get("version",
@ -110,13 +112,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
if review_path:
fill_pairs.append(("review_filepath", review_path))
task_data = instance.data.get("task")
if not task_data:
task_data = fill_data.get("task")
for key, value in task_data.items():
fill_key = "task[{}]".format(key)
fill_pairs.append((fill_key, value))
fill_pairs.append(("task", task_data["name"]))
task_data = fill_data.get("task")
if task_data:
if (
"{task}" in message_templ
or "{Task}" in message_templ
or "{TASK}" in message_templ
):
fill_pairs.append(("task", task_data["name"]))
else:
for key, value in task_data.items():
fill_key = "task[{}]".format(key)
fill_pairs.append((fill_key, value))
self.log.debug("fill_pairs ::{}".format(fill_pairs))
multiple_case_variants = prepare_template_data(fill_pairs)

View file

@ -0,0 +1,107 @@
import os
import re
import json
import six
from openpype.settings import get_project_settings
from openpype.lib import Logger
from .anatomy import Anatomy
from .template_data import get_project_template_data
def concatenate_splitted_paths(split_paths, anatomy):
    """Join splitted path items into normalized filesystem paths.

    Each item of ``split_paths`` may be a string or a list of path
    segments. Segments that are not template keys (``{...}``) get any
    ``[...]`` markers stripped. The legacy ``__project_root__`` segment
    is expanded to one path per existing anatomy root.

    Args:
        split_paths (list): Items of strings or lists of path segments.
        anatomy (Anatomy): Project anatomy (used for root expansion).

    Returns:
        list: Normalized path strings.
    """
    log = Logger.get_logger("concatenate_splitted_paths")
    pattern_array = re.compile(r"\[.*\]")
    output = []
    for path_items in split_paths:
        clean_items = []
        if isinstance(path_items, str):
            path_items = [path_items]

        for path_item in path_items:
            if not re.match(r"{.+}", path_item):
                path_item = re.sub(pattern_array, "", path_item)
            clean_items.append(path_item)

        # backward compatibility
        if "__project_root__" in path_items:
            for root, root_path in anatomy.roots.items():
                if not os.path.exists(str(root_path)):
                    # BUGFIX: message had a duplicated word ("path path")
                    # and broken grammar
                    log.debug(
                        "Root \"{}\" path \"{}\" does not exist on"
                        " this computer!".format(root, root_path)
                    )
                    continue
                clean_items = ["{{root[{}]}}".format(root),
                               r"{project[name]}"] + clean_items[1:]
                output.append(os.path.normpath(os.path.sep.join(clean_items)))
            continue

        output.append(os.path.normpath(os.path.sep.join(clean_items)))
    return output
def fill_paths(path_list, anatomy):
    """Format template paths with project data and anatomy roots.

    Args:
        path_list (list): Path templates to format.
        anatomy (Anatomy): Project anatomy providing name and roots.

    Returns:
        list: Paths with template keys filled in.
    """
    fill_data = get_project_template_data(project_name=anatomy.project_name)
    fill_data["root"] = anatomy.roots
    return [template.format(**fill_data) for template in path_list]
def create_project_folders(project_name, basic_paths=None):
    """Create project folder structure on disk.

    Args:
        project_name (str): Name of project to create folders for.
        basic_paths (list): Optional pre-queried basic paths. Loaded from
            project settings when not passed.
    """
    log = Logger.get_logger("create_project_folders")
    anatomy = Anatomy(project_name)
    if basic_paths is None:
        basic_paths = get_project_basic_paths(project_name)

    if not basic_paths:
        return

    concat_paths = concatenate_splitted_paths(basic_paths, anatomy)
    for folder_path in fill_paths(concat_paths, anatomy):
        # Only create folders that are not yet on disk
        if os.path.exists(folder_path):
            log.debug("Folder already exists: {}".format(folder_path))
            continue
        log.debug("Creating folder: {}".format(folder_path))
        os.makedirs(folder_path)
def _list_path_items(folder_structure):
output = []
for key, value in folder_structure.items():
if not value:
output.append(key)
continue
paths = _list_path_items(value)
for path in paths:
if not isinstance(path, (list, tuple)):
path = [path]
item = [key]
item.extend(path)
output.append(item)
return output
def get_project_basic_paths(project_name):
    """Get basic folder paths defined in project settings.

    Args:
        project_name (str): Name of project to query settings for.

    Returns:
        list: Flattened folder structure items (may be empty).
    """
    settings = get_project_settings(project_name)
    structure = settings["global"]["project_folder_structure"]
    if not structure:
        return []

    # Settings may store the structure as a JSON encoded string
    if isinstance(structure, six.string_types):
        structure = json.loads(structure)
    return _list_path_items(structure)

View file

@ -53,7 +53,7 @@ def get_project_template_data(project_doc=None, project_name=None):
project_name = project_doc["name"]
if not project_doc:
project_code = get_project(project_name, fields=["data.code"])
project_doc = get_project(project_name, fields=["data.code"])
project_code = project_doc.get("data", {}).get("code")
return {

View file

@ -9,6 +9,8 @@ from .path_resolving import (
get_custom_workfile_template,
get_custom_workfile_template_by_string_context,
create_workdir_extra_folders,
)
from .build_workfile import BuildWorkfile
@ -26,5 +28,7 @@ __all__ = (
"get_custom_workfile_template",
"get_custom_workfile_template_by_string_context",
"create_workdir_extra_folders",
"BuildWorkfile",
)

View file

@ -467,3 +467,60 @@ def get_custom_workfile_template_by_string_context(
return get_custom_workfile_template(
project_doc, asset_doc, task_name, host_name, anatomy, project_settings
)
def create_workdir_extra_folders(
    workdir,
    host_name,
    task_type,
    task_name,
    project_name,
    project_settings=None
):
    """Create extra folders in work directory based on context.

    Args:
        workdir (str): Path to workdir where workfiles is stored.
        host_name (str): Name of host implementation.
        task_type (str): Type of task for which extra folders should be
            created.
        task_name (str): Name of task for which extra folders should be
            created.
        project_name (str): Name of project on which task is.
        project_settings (dict): Prepared project settings. Are loaded if not
            passed.
    """

    # Use passed settings or load them for the project
    settings = project_settings or get_project_settings(project_name)
    profiles = (
        settings["global"]["tools"]["Workfiles"]["extra_folders"]
    )
    # Nothing to do when no profiles are defined
    if not profiles:
        return

    # Find the profile matching current context
    matching_profile = filter_profiles(profiles, {
        "task_types": task_type,
        "task_names": task_name,
        "hosts": host_name
    })
    if matching_profile is None:
        return

    for folder in matching_profile["folders"]:
        # Normalize separators and strip leading slash
        normalized = folder.replace("\\", "/").lstrip("/")
        # Skip empty strings
        if not normalized:
            continue
        full_path = os.path.join(workdir, normalized)
        if not os.path.exists(full_path):
            os.makedirs(full_path)

View file

@ -0,0 +1,105 @@
import pyblish.api
from openpype.client import (
get_last_version_by_subset_name,
get_representations,
)
from openpype.pipeline import (
legacy_io,
get_representation_path,
)
class CollectAudio(pyblish.api.InstancePlugin):
    """Collect asset's last published audio.

    The audio subset name searched for is defined in:
        project settings > Collect Audio

    Found representation path is stored to ``instance.data["audio"]``.
    Results are cached per asset on the context so multiple instances of
    the same asset trigger only one database query.
    """

    label = "Collect Asset Audio"
    order = pyblish.api.CollectorOrder + 0.1
    families = ["review"]
    hosts = [
        "nuke",
        "maya",
        "shell",
        "hiero",
        "premiere",
        "harmony",
        "traypublisher",
        "standalonepublisher",
        "fusion",
        "tvpaint",
        "resolve",
        "webpublisher",
        "aftereffects",
        "flame",
        "unreal"
    ]

    # Subset name to search for (overridable from project settings)
    audio_subset_name = "audioMain"

    def process(self, instance):
        if instance.data.get("audio"):
            # BUGFIX: typo in log message ("collecion")
            self.log.info(
                "Skipping Audio collection. It is already collected"
            )
            return

        # Add audio to instance if exists.
        self.log.info((
            "Searching for audio subset '{subset}'"
            " in asset '{asset}'"
        ).format(
            subset=self.audio_subset_name,
            asset=instance.data["asset"]
        ))
        repre_doc = self._get_repre_doc(instance)

        # Add audio to instance if representation was found
        if repre_doc:
            instance.data["audio"] = [{
                "offset": 0,
                "filename": get_representation_path(repre_doc)
            }]
            self.log.info("Audio Data added to instance ...")

    def _get_repre_doc(self, instance):
        """Find audio representation document for instance's asset.

        Uses a per-context cache so each asset is queried only once.

        Returns:
            dict or None: Representation document when found.
        """
        cache = instance.context.data.get("__cache_asset_audio")
        if cache is None:
            cache = {}
            instance.context.data["__cache_asset_audio"] = cache

        asset_name = instance.data["asset"]

        # first try to get it from cache
        if asset_name in cache:
            return cache[asset_name]

        project_name = legacy_io.active_project()

        # Find latest versions document
        last_version_doc = get_last_version_by_subset_name(
            project_name,
            self.audio_subset_name,
            asset_name=asset_name,
            fields=["_id"]
        )

        repre_doc = None
        if last_version_doc:
            # Try to find it's representation (Expected there is only one)
            repre_docs = list(get_representations(
                project_name, version_ids=[last_version_doc["_id"]]
            ))
            if not repre_docs:
                self.log.warning(
                    "Version document does not contain any representations"
                )
            else:
                repre_doc = repre_docs[0]

        # update cache
        cache[asset_name] = repre_doc

        return repre_doc

View file

@ -488,12 +488,6 @@ class ExtractBurnin(publish.Extractor):
"frame_end_handle": frame_end_handle
}
# use explicit username for webpublishes as rewriting
# OPENPYPE_USERNAME might have side effects
webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME")
if webpublish_user_name:
burnin_data["username"] = webpublish_user_name
self.log.debug(
"Basic burnin_data: {}".format(json.dumps(burnin_data, indent=4))
)

View file

@ -135,7 +135,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# the database even if not used by the destination template
db_representation_context_keys = [
"project", "asset", "task", "subset", "version", "representation",
"family", "hierarchy", "username", "output"
"family", "hierarchy", "username", "user", "output"
]
skip_host_families = []

View file

@ -46,7 +46,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
ignored_representation_names = []
db_representation_context_keys = [
"project", "asset", "task", "subset", "representation",
"family", "hierarchy", "task", "username"
"family", "hierarchy", "task", "username", "user"
]
# QUESTION/TODO this process should happen on server if crashed due to
# permissions error on files (files were used or user didn't have perms)

View file

@ -127,7 +127,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
exclude_families = ["render.farm"]
db_representation_context_keys = [
"project", "asset", "task", "subset", "version", "representation",
"family", "hierarchy", "task", "username"
"family", "hierarchy", "task", "username", "user"
]
default_template_name = "publish"

View file

@ -3,6 +3,10 @@
"CollectAnatomyInstanceData": {
"follow_workfile_version": false
},
"CollectAudio": {
"enabled": false,
"audio_subset_name": "audioMain"
},
"CollectSceneVersion": {
"hosts": [
"aftereffects",

View file

@ -1,8 +1,4 @@
{
"entities_root": {
"assets": "Assets",
"shots": "Shots"
},
"entities_naming_pattern": {
"episode": "E##",
"sequence": "SQ##",

View file

@ -5,23 +5,6 @@
"collapsible": true,
"is_file": true,
"children": [
{
"type": "dict",
"key": "entities_root",
"label": "Entities root folder",
"children": [
{
"type": "text",
"key": "assets",
"label": "Assets:"
},
{
"type": "text",
"key": "shots",
"label": "Shots (includes Episodes & Sequences if any):"
}
]
},
{
"type": "dict",
"key": "entities_naming_pattern",

View file

@ -18,6 +18,27 @@
}
]
},
{
"type": "dict",
"collapsible": true,
"checkbox_key": "enabled",
"key": "CollectAudio",
"label": "Collect Audio",
"is_group": true,
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"key": "audio_subset_name",
"label": "Name of audio variant",
"type": "text",
"placeholder": "audioMain"
}
]
},
{
"type": "dict",
"collapsible": true,

View file

@ -1,5 +1,12 @@
from Qt import QtWidgets, QtCore, QtGui
from openpype import resources
from openpype.style import load_stylesheet
from openpype.widgets import PasswordDialog
from openpype.lib import is_admin_password_required, Logger
from openpype.pipeline import AvalonMongoDB
from openpype.pipeline.project_folders import create_project_folders
from . import (
ProjectModel,
ProjectProxyFilter,
@ -13,17 +20,6 @@ from . import (
)
from .widgets import ConfirmProjectDeletion
from .style import ResourceCache
from openpype.style import load_stylesheet
from openpype.lib import is_admin_password_required
from openpype.widgets import PasswordDialog
from openpype.pipeline import AvalonMongoDB
from openpype import resources
from openpype.api import (
get_project_basic_paths,
create_project_folders,
Logger
)
class ProjectManagerWindow(QtWidgets.QWidget):
@ -259,12 +255,8 @@ class ProjectManagerWindow(QtWidgets.QWidget):
qm.Yes | qm.No)
if ans == qm.Yes:
try:
# Get paths based on presets
basic_paths = get_project_basic_paths(project_name)
if not basic_paths:
pass
# Invoking OpenPype API to create the project folders
create_project_folders(basic_paths, project_name)
create_project_folders(project_name)
except Exception as exc:
self.log.warning(
"Cannot create starting folders: {}".format(exc),

View file

@ -34,7 +34,8 @@ from .lib import (
class InventoryModel(TreeModel):
"""The model for the inventory"""
Columns = ["Name", "version", "count", "family", "loader", "objectName"]
Columns = ["Name", "version", "count", "family",
"group", "loader", "objectName"]
OUTDATED_COLOR = QtGui.QColor(235, 30, 30)
CHILD_OUTDATED_COLOR = QtGui.QColor(200, 160, 30)
@ -157,8 +158,13 @@ class InventoryModel(TreeModel):
# Family icon
return item.get("familyIcon", None)
column_name = self.Columns[index.column()]
if column_name == "group" and item.get("group"):
return qtawesome.icon("fa.object-group",
color=get_default_entity_icon_color())
if item.get("isGroupNode"):
column_name = self.Columns[index.column()]
if column_name == "active_site":
provider = item.get("active_site_provider")
return self._site_icons.get(provider)
@ -423,6 +429,7 @@ class InventoryModel(TreeModel):
group_node["familyIcon"] = family_icon
group_node["count"] = len(group_items)
group_node["isGroupNode"] = True
group_node["group"] = subset["data"].get("subsetGroup")
if self.sync_enabled:
progress = get_progress_for_repre(

View file

@ -89,7 +89,8 @@ class SceneInventoryWindow(QtWidgets.QDialog):
view.setColumnWidth(1, 55) # version
view.setColumnWidth(2, 55) # count
view.setColumnWidth(3, 150) # family
view.setColumnWidth(4, 100) # namespace
view.setColumnWidth(4, 120) # group
view.setColumnWidth(5, 150) # loader
# apply delegates
version_delegate = VersionDelegate(legacy_io, self)

View file

@ -9,11 +9,11 @@ import platform
from Qt import QtCore, QtGui, QtWidgets
import openpype.version
from openpype.api import (
resources,
get_system_settings
from openpype import resources, style
from openpype.lib import (
get_openpype_execute_args,
Logger,
)
from openpype.lib import get_openpype_execute_args, Logger
from openpype.lib.openpype_version import (
op_version_control_available,
get_expected_version,
@ -25,8 +25,8 @@ from openpype.lib.openpype_version import (
get_openpype_version,
)
from openpype.modules import TrayModulesManager
from openpype import style
from openpype.settings import (
get_system_settings,
SystemSettings,
ProjectSettings,
DefaultsNotDefined
@ -774,10 +774,24 @@ class PypeTrayStarter(QtCore.QObject):
def main():
log = Logger.get_logger(__name__)
app = QtWidgets.QApplication.instance()
if not app:
app = QtWidgets.QApplication([])
for attr_name in (
"AA_EnableHighDpiScaling",
"AA_UseHighDpiPixmaps"
):
attr = getattr(QtCore.Qt, attr_name, None)
if attr is None:
log.debug((
"Missing QtCore.Qt attribute \"{}\"."
" UI quality may be affected."
).format(attr_name))
else:
app.setAttribute(attr)
starter = PypeTrayStarter(app)
# TODO remove when pype.exe will have an icon

View file

@ -10,10 +10,7 @@ from openpype.host import IWorkfileHost
from openpype.client import get_asset_by_id
from openpype.tools.utils import PlaceholderLineEdit
from openpype.tools.utils.delegates import PrettyTimeDelegate
from openpype.lib import (
emit_event,
create_workdir_extra_folders,
)
from openpype.lib import emit_event
from openpype.pipeline import (
registered_host,
legacy_io,
@ -23,7 +20,10 @@ from openpype.pipeline.context_tools import (
compute_session_changes,
change_current_context
)
from openpype.pipeline.workfile import get_workfile_template_key
from openpype.pipeline.workfile import (
get_workfile_template_key,
create_workdir_extra_folders,
)
from .model import (
WorkAreaFilesModel,