Merge branch 'develop' into main
.dockerignore (new file, +146 lines)
@@ -0,0 +1,146 @@
# Created by .ignore support plugin (hsz.mobi)
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

.poetry/
.github/
vendor/bin/
docs/
website/
Dockerfile (new file, +82 lines)
@@ -0,0 +1,82 @@
# Build Pype docker image
FROM centos:7 AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.10

LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/pype"

USER root

# update base
RUN yum -y install deltarpm \
    && yum -y update \
    && yum clean all

# add tools we need
RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm \
    && yum -y install centos-release-scl \
    && yum -y install \
        bash \
        which \
        git \
        devtoolset-7-gcc* \
        make \
        cmake \
        curl \
        wget \
        gcc \
        zlib-devel \
        bzip2 \
        bzip2-devel \
        readline-devel \
        sqlite sqlite-devel \
        openssl-devel \
        tk-devel libffi-devel \
        qt5-qtbase-devel \
        patchelf \
    && yum clean all

RUN mkdir /opt/openpype
# RUN useradd -m pype
# RUN chown pype /opt/openpype
# USER pype

RUN curl https://pyenv.run | bash
ENV PYTHON_CONFIGURE_OPTS --enable-shared

RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"' >> $HOME/.bashrc \
    && echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \
    && echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \
    && echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc
RUN source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION}

COPY . /opt/openpype/
RUN rm -rf /openpype/.poetry || echo "No Poetry installed yet."
# USER root
# RUN chown -R pype /opt/openpype
RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh

# USER pype

WORKDIR /opt/openpype

RUN cd /opt/openpype \
    && source $HOME/.bashrc \
    && pyenv local ${OPENPYPE_PYTHON_VERSION}

RUN source $HOME/.bashrc \
    && ./tools/create_env.sh

RUN source $HOME/.bashrc \
    && ./tools/fetch_thirdparty_libs.sh

RUN source $HOME/.bashrc \
    && bash ./tools/build.sh \
    && cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \
    && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \
    && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib

RUN cd /opt/openpype \
    && rm -rf ./vendor/bin
@@ -47,6 +47,10 @@ class CreateRender(openpype.api.Creator):
 
         self.data["members"] = [item.id]
         self.data["uuid"] = item.id  # for SubsetManager
+        self.data["subset"] = self.data["subset"]\
+            .replace(stub.PUBLISH_ICON, '')\
+            .replace(stub.LOADED_ICON, '')
+
         stub.imprint(item, self.data)
         stub.set_label_color(item.id, 14)  # Cyan options 0 - 16
         stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])
@@ -68,7 +68,11 @@ def get_rate(item):
         return None
 
     num, den = item.framerate().toRational()
-    rate = float(num) / float(den)
+
+    try:
+        rate = float(num) / float(den)
+    except ZeroDivisionError:
+        return None
 
     if rate.is_integer():
         return rate
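A quick illustration of what the new guard buys (a standalone sketch with a hypothetical name; the handling of non-integer rates such as 24000/1001 continues past the lines shown in this hunk and is assumed here):

    def rational_to_fps(num, den):
        # mirrors the guarded division added above
        try:
            rate = float(num) / float(den)
        except ZeroDivisionError:
            return None
        if rate.is_integer():
            return rate
        return rate  # assumed fall-through for NTSC-style rates

    rational_to_fps(25, 1)        # 25.0
    rational_to_fps(24000, 1001)  # 23.976...
    rational_to_fps(1, 0)         # None (previously raised ZeroDivisionError)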
@@ -24,7 +24,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
 
         for track_item in selected_timeline_items:
 
-            data = dict()
+            data = {}
             clip_name = track_item.name()
 
             # get openpype tag data
@@ -43,6 +43,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
             tag_data["handleEnd"] = min(
                 tag_data["handleEnd"], int(track_item.handleOutLength()))
 
+            # add audio to families
+            with_audio = False
+            if tag_data.pop("audio"):
+                with_audio = True
+
             # add tag data to instance data
             data.update({
                 k: v for k, v in tag_data.items()
@@ -94,6 +99,17 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
             self.log.debug(
                 "_ instance.data: {}".format(pformat(instance.data)))
 
+            if not with_audio:
+                return
+
+            # create audio subset instance
+            self.create_audio_instance(context, **data)
+
+            # add audioReview attribute to plate instance data
+            # if reviewTrack is on
+            if tag_data.get("reviewTrack") is not None:
+                instance.data["reviewAudio"] = True
+
     def get_resolution_to_data(self, data, context):
         assert data.get("otioClip"), "Missing `otioClip` data"
@@ -159,6 +175,46 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
         self.log.debug(
             "_ instance.data: {}".format(pformat(instance.data)))
 
+    def create_audio_instance(self, context, **data):
+        master_layer = data.get("heroTrack")
+
+        if not master_layer:
+            return
+
+        asset = data.get("asset")
+        item = data.get("item")
+        clip_name = item.name()
+
+        asset = data["asset"]
+        subset = "audioMain"
+
+        # insert family into families
+        family = "audio"
+
+        # form label
+        label = asset
+        if asset != clip_name:
+            label += " ({}) ".format(clip_name)
+        label += " {}".format(subset)
+        label += " [{}]".format(family)
+
+        data.update({
+            "name": "{}_{}".format(asset, subset),
+            "label": label,
+            "subset": subset,
+            "asset": asset,
+            "family": family,
+            "families": ["clip"]
+        })
+        # remove review track attr if any
+        data.pop("reviewTrack")
+
+        # create instance
+        instance = context.create_instance(**data)
+        self.log.info("Creating instance: {}".format(instance))
+        self.log.debug(
+            "_ instance.data: {}".format(pformat(instance.data)))
+
     def get_otio_clip_instance_data(self, otio_timeline, track_item):
         """
         Return otio objects for timeline, track and clip
@@ -1124,16 +1124,14 @@ def get_id_required_nodes(referenced_nodes=False, nodes=None):
 
 
 def get_id(node):
-    """
-    Get the `cbId` attribute of the given node
+    """Get the `cbId` attribute of the given node.
 
     Args:
         node (str): the name of the node to retrieve the attribute from
 
     Returns:
         str
 
     """
-
     if node is None:
         return
@@ -2688,3 +2686,69 @@ def show_message(title, msg):
         pass
     else:
         message_window.message(title=title, message=msg, parent=parent)
+
+
+def iter_shader_edits(relationships, shader_nodes, nodes_by_id, label=None):
+    """Yield edits as a set of actions."""
+
+    attributes = relationships.get("attributes", [])
+    shader_data = relationships.get("relationships", {})
+
+    shading_engines = cmds.ls(shader_nodes, type="objectSet", long=True)
+    assert shading_engines, "Error in retrieving objectSets from reference"
+
+    # region compute lookup
+    shading_engines_by_id = defaultdict(list)
+    for shad in shading_engines:
+        shading_engines_by_id[get_id(shad)].append(shad)
+    # endregion
+
+    # region assign shading engines and other sets
+    for data in shader_data.values():
+        # collect all unique IDs of the set members
+        shader_uuid = data["uuid"]
+        member_uuids = [
+            (member["uuid"], member.get("components"))
+            for member in data["members"]]
+
+        filtered_nodes = list()
+        for _uuid, components in member_uuids:
+            nodes = nodes_by_id.get(_uuid, None)
+            if nodes is None:
+                continue
+
+            if components:
+                # Assign to the components
+                nodes = [".".join([node, components]) for node in nodes]
+
+            filtered_nodes.extend(nodes)
+
+        id_shading_engines = shading_engines_by_id[shader_uuid]
+        if not id_shading_engines:
+            log.error("{} - No shader found with cbId "
+                      "'{}'".format(label, shader_uuid))
+            continue
+        elif len(id_shading_engines) > 1:
+            log.error("{} - Skipping shader assignment. "
+                      "More than one shader found with cbId "
+                      "'{}'. (found: {})".format(label, shader_uuid,
+                                                 id_shading_engines))
+            continue
+
+        if not filtered_nodes:
+            log.warning("{} - No nodes found for shading engine "
+                        "'{}'".format(label, id_shading_engines[0]))
+            continue
+
+        yield {"action": "assign",
+               "uuid": data["uuid"],
+               "nodes": filtered_nodes,
+               "shader": id_shading_engines[0]}
+
+    for data in attributes:
+        nodes = nodes_by_id.get(data["uuid"], [])
+        attr_value = data["attributes"]
+        yield {"action": "setattr",
+               "uuid": data["uuid"],
+               "nodes": nodes,
+               "attributes": attr_value}
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+"""Look loader."""
 import openpype.hosts.maya.api.plugin
 from avalon import api, io
 import json
@@ -19,7 +19,6 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
                 "rig",
                 "camerarig"]
     representations = ["ma", "abc", "fbx", "mb"]
-    tool_names = ["loader"]
 
     label = "Reference"
     order = -10
@@ -1,12 +1,21 @@
-from avalon.maya import lib
-from avalon import api
-from openpype.api import get_project_settings
+# -*- coding: utf-8 -*-
+"""Loader for Vray Proxy files.
+
+If there are Alembics published along the vray proxy (in the same version),
+the loader will use them instead of the native vray vrmesh format.
+
+"""
+import os
+
+import maya.cmds as cmds
+
+from avalon.maya import lib
+from avalon import api, io
+from openpype.api import get_project_settings
 
 
 class VRayProxyLoader(api.Loader):
-    """Load VRayMesh proxy"""
+    """Load VRay Proxy with Alembic or VrayMesh."""
 
     families = ["vrayproxy"]
     representations = ["vrmesh"]
@@ -16,8 +25,17 @@ class VRayProxyLoader(api.Loader):
     icon = "code-fork"
     color = "orange"
 
-    def load(self, context, name, namespace, data):
+    def load(self, context, name=None, namespace=None, options=None):
+        # type: (dict, str, str, dict) -> None
+        """Loader entry point.
+
+        Args:
+            context (dict): Loaded representation context.
+            name (str): Name of container.
+            namespace (str): Optional namespace name.
+            options (dict): Optional loader options.
+
+        """
         from avalon.maya.pipeline import containerise
         from openpype.hosts.maya.api.lib import namespaced
@@ -26,6 +44,9 @@ class VRayProxyLoader(api.Loader):
         except ValueError:
             family = "vrayproxy"
 
+        # get all representations for this version
+        self.fname = self._get_abc(context["version"]["_id"]) or self.fname
+
         asset_name = context['asset']["name"]
         namespace = namespace or lib.unique_namespace(
             asset_name + "_",
@@ -39,8 +60,8 @@ class VRayProxyLoader(api.Loader):
         with lib.maintained_selection():
             cmds.namespace(addNamespace=namespace)
             with namespaced(namespace, new=False):
-                nodes, group_node = self.create_vray_proxy(name,
-                                                           filename=self.fname)
+                nodes, group_node = self.create_vray_proxy(
+                    name, filename=self.fname)
 
         self[:] = nodes
         if not nodes:
@@ -63,7 +84,8 @@ class VRayProxyLoader(api.Loader):
             loader=self.__class__.__name__)
 
     def update(self, container, representation):
-
+        # type: (dict, dict) -> None
+        """Update container with specified representation."""
         node = container['objectName']
         assert cmds.objExists(node), "Missing container"
@@ -71,7 +93,8 @@ class VRayProxyLoader(api.Loader):
         vraymeshes = cmds.ls(members, type="VRayMesh")
         assert vraymeshes, "Cannot find VRayMesh in container"
 
-        filename = api.get_representation_path(representation)
+        # get all representations for this version
+        filename = self._get_abc(representation["parent"]) or api.get_representation_path(representation)  # noqa: E501
 
         for vray_mesh in vraymeshes:
             cmds.setAttr("{}.fileName".format(vray_mesh),
@@ -84,7 +107,8 @@ class VRayProxyLoader(api.Loader):
                      type="string")
 
     def remove(self, container):
-
+        # type: (dict) -> None
+        """Remove loaded container."""
        # Delete container and its contents
        if cmds.objExists(container['objectName']):
            members = cmds.sets(container['objectName'], query=True) or []
@@ -101,61 +125,62 @@ class VRayProxyLoader(api.Loader):
                      "still has members: %s", namespace)
 
     def switch(self, container, representation):
+        # type: (dict, dict) -> None
+        """Switch loaded representation."""
         self.update(container, representation)
 
     def create_vray_proxy(self, name, filename):
+        # type: (str, str) -> (list, str)
         """Re-create the structure created by VRay to support vrmeshes
 
         Args:
-            name(str): name of the asset
+            name (str): Name of the asset.
+            filename (str): File name of vrmesh.
 
         Returns:
             nodes(list)
 
         """
-
-        # Create nodes
-        vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
-        mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
-        vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
-                                    name="{}_VRMM".format(name))
-        vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
-                                empty=True,
-                                renderable=True,
-                                noSurfaceShader=True)
+        if name is None:
+            name = os.path.splitext(os.path.basename(filename))[0]
 
-        cmds.setAttr("{}.fileName".format(vray_mesh),
-                     filename,
-                     type="string")
+        parent = cmds.createNode("transform", name=name)
+        proxy = cmds.createNode(
+            "VRayProxy", name="{}Shape".format(name), parent=parent)
+        cmds.setAttr(proxy + ".fileName", filename, type="string")
+        cmds.connectAttr("time1.outTime", proxy + ".currentFrame")
 
-        # Create important connections
-        cmds.connectAttr("time1.outTime",
-                         "{0}.currentFrame".format(vray_mesh))
-        cmds.connectAttr("{}.fileName2".format(vray_mesh),
-                         "{}.fileName".format(vray_mat))
-        cmds.connectAttr("{}.instancing".format(vray_mesh),
-                         "{}.instancing".format(vray_mat))
-        cmds.connectAttr("{}.output".format(vray_mesh),
-                         "{}.inMesh".format(mesh_shape))
-        cmds.connectAttr("{}.overrideFileName".format(vray_mesh),
-                         "{}.overrideFileName".format(vray_mat))
-        cmds.connectAttr("{}.currentFrame".format(vray_mesh),
-                         "{}.currentFrame".format(vray_mat))
+        return [parent, proxy], parent
 
-        # Set surface shader input
-        cmds.connectAttr("{}.outColor".format(vray_mat),
-                         "{}.surfaceShader".format(vray_mat_sg))
+    def _get_abc(self, version_id):
+        # type: (str) -> str
+        """Get abc representation file path if present.
 
-        # Connect mesh to shader
-        cmds.sets([mesh_shape], addElement=vray_mat_sg)
+        If there is an Alembic (abc) representation published along the
+        vray proxy, get its file path.
 
-        group_node = cmds.group(empty=True, name="{}_GRP".format(name))
-        mesh_transform = cmds.listRelatives(mesh_shape,
-                                            parent=True, fullPath=True)
-        cmds.parent(mesh_transform, group_node)
-        nodes = [vray_mesh, mesh_shape, vray_mat, vray_mat_sg, group_node]
+        Args:
+            version_id (str): Version hash id.
 
-        # Fix: Force refresh so the mesh shows correctly after creation
-        cmds.refresh()
-        cmds.setAttr("{}.geomType".format(vray_mesh), 2)
+        Returns:
+            str: Path to file.
+            None: If abc not found.
 
-        return nodes, group_node
+        """
+        self.log.debug(
+            "Looking for abc in published representations of this version.")
+        abc_rep = io.find_one(
+            {
+                "type": "representation",
+                "parent": io.ObjectId(version_id),
+                "name": "abc"
+            })
+
+        if abc_rep:
+            self.log.debug("Found, we'll link alembic to vray proxy.")
+            file_name = api.get_representation_path(abc_rep)
+            self.log.debug("File: {}".format(self.fname))
+            return file_name
+
+        return ""
openpype/hosts/maya/plugins/publish/collect_vrayproxy.py (new file, +18 lines)
@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
"""Collect Vray Proxy."""
import pyblish.api


class CollectVrayProxy(pyblish.api.InstancePlugin):
    """Collect Vray Proxy instance.

    Add `pointcache` family for it.
    """
    order = pyblish.api.CollectorOrder + 0.01
    label = 'Collect Vray Proxy'
    families = ["vrayproxy"]

    def process(self, instance):
        """Collector entry point."""
        if not instance.data.get('families'):
            instance.data["families"] = []
@@ -18,7 +18,8 @@ class ExtractAlembic(openpype.api.Extractor):
     label = "Extract Pointcache (Alembic)"
     hosts = ["maya"]
     families = ["pointcache",
-                "model"]
+                "model",
+                "vrayproxy"]
 
     def process(self, instance):
@@ -26,15 +26,11 @@ class ExtractThumbnail(openpype.api.Extractor):
     def process(self, instance):
         self.log.info("Extracting capture..")
 
-        start = cmds.currentTime(query=True)
-        end = cmds.currentTime(query=True)
-        self.log.info("start: {}, end: {}".format(start, end))
-
         camera = instance.data['review_camera']
 
         capture_preset = ""
         capture_preset = (
-            instance.context.data["project_settings"]['maya']['publish']['ExtractPlayblast']
+            instance.context.data["project_settings"]['maya']['publish']['ExtractPlayblast']['capture_preset']
         )
 
         try:
@@ -50,8 +46,8 @@ class ExtractThumbnail(openpype.api.Extractor):
         # preset['compression'] = "qt"
         preset['quality'] = 50
         preset['compression'] = "jpg"
-        preset['start_frame'] = start
-        preset['end_frame'] = end
+        preset['start_frame'] = instance.data["frameStart"]
+        preset['end_frame'] = instance.data["frameStart"]
         preset['camera_options'] = {
             "displayGateMask": False,
             "displayResolution": False,
@@ -373,21 +373,16 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
 
     prev_node = None
     with GN:
-        connections = list()
         if input:
+            input_name = str(input.name()).replace(" ", "")
             # if connected input node was defined
-            connections.append({
-                "node": input,
-                "inputName": input.name()})
             prev_node = nuke.createNode(
-                "Input", "name {}".format(input.name()))
-            prev_node.hideControlPanel()
+                "Input", "name {}".format(input_name))
         else:
             # generic input node connected to nothing
             prev_node = nuke.createNode(
                 "Input", "name {}".format("rgba"))
-            prev_node.hideControlPanel()
 
+        prev_node.hideControlPanel()
         # creating pre-write nodes `prenodes`
         if prenodes:
             for name, klass, properties, set_output_to in prenodes:
@@ -416,18 +411,12 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
                     input_node = nuke.createNode(
                         "Input", "name {}".format(node_name))
                     input_node.hideControlPanel()
-                    connections.append({
-                        "node": nuke.toNode(node_name),
-                        "inputName": node_name})
                     now_node.setInput(1, input_node)
 
                 elif isinstance(set_output_to, str):
                     input_node = nuke.createNode(
                         "Input", "name {}".format(node_name))
                     input_node.hideControlPanel()
-                    connections.append({
-                        "node": nuke.toNode(set_output_to),
-                        "inputName": set_output_to})
                     now_node.setInput(0, input_node)
 
                 else:
@@ -74,6 +74,9 @@ class CreateImage(openpype.api.Creator):
 
         for group in groups:
             long_names = []
+            group.name = group.name.replace(stub.PUBLISH_ICON, ''). \
+                replace(stub.LOADED_ICON, '')
+
             if group.long_name:
                 for directory in group.long_name[::-1]:
                     name = directory.replace(stub.PUBLISH_ICON, '').\
@@ -164,24 +164,26 @@ def create_media_pool_item(fpath: str,
     # try to search in bin if the clip does not exist
     existing_mpi = get_media_pool_item(fpath, root_bin)
 
-    print(">>>>> existing_mpi: {}".format(existing_mpi))
-    if not existing_mpi:
-        print("___ fpath: {}".format(fpath))
-        dirname, file = os.path.split(fpath)
-        _name, ext = os.path.splitext(file)
-        print(dirname)
-        media_pool_items = media_storage.AddItemListToMediaPool(os.path.normpath(dirname))
-        print(media_pool_items)
-        # pop the returned dict on first item as resolve data object is such
-        if media_pool_items:
-            media_pool_item = [mpi for mpi in media_pool_items
-                               if ext in mpi.GetClipProperty("File Path")]
-            return media_pool_item.pop()
-        else:
-            return False
-    else:
+    if existing_mpi:
         return existing_mpi
+
+    dirname, file = os.path.split(fpath)
+    _name, ext = os.path.splitext(file)
+
+    # add all data in folder to mediapool
+    media_pool_items = media_storage.AddItemListToMediaPool(
+        os.path.normpath(dirname))
+
+    if not media_pool_items:
+        return False
+
+    # if any are added then look into them for the right extension
+    media_pool_item = [mpi for mpi in media_pool_items
+                       if ext in mpi.GetClipProperty("File Path")]
+
+    # return only first found
+    return media_pool_item.pop()
 
 
 def get_media_pool_item(fpath, root: object = None) -> object:
     """
@@ -199,7 +201,6 @@ def get_media_pool_item(fpath, root: object = None) -> object:
     fname = os.path.basename(fpath)
 
     for _mpi in root.GetClipList():
-        print(">>> _mpi: {}".format(_mpi.GetClipProperty("File Name")))
         _mpi_name = _mpi.GetClipProperty("File Name")
         _mpi_name = get_reformated_path(_mpi_name, first=True)
         if fname in _mpi_name:
@@ -312,7 +313,7 @@ def get_current_timeline_items(
     selecting_color = selecting_color or "Chocolate"
     project = get_current_project()
     timeline = get_current_timeline()
-    selected_clips = list()
+    selected_clips = []
 
     # get all tracks count filtered by track type
     selected_track_count = timeline.GetTrackCount(track_type)
@@ -708,7 +709,7 @@ def get_clip_attributes(clip):
     """
     mp_item = clip.GetMediaPoolItem()
 
-    data = {
+    return {
         "clipIn": clip.GetStart(),
         "clipOut": clip.GetEnd(),
         "clipLeftOffset": clip.GetLeftOffset(),

@@ -718,7 +719,6 @@ def get_clip_attributes(clip):
         "sourceId": mp_item.GetMediaId(),
         "sourceProperties": mp_item.GetClipProperty()
     }
-    return data
 
 
 def set_project_manager_to_folder_name(folder_name):
@@ -850,12 +850,12 @@ def get_reformated_path(path, padded=False, first=False):
         get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr
 
     """
-    num_pattern = r"(\[\d+\-\d+\])"
-    padding_pattern = r"(\d+)(?=-)"
+    first_frame_pattern = re.compile(r"\[(\d+)\-\d+\]")
 
     if "[" in path:
+        padding_pattern = r"(\d+)(?=-)"
         padding = len(re.findall(padding_pattern, path).pop())
+        num_pattern = r"(\[\d+\-\d+\])"
         if padded:
            path = re.sub(num_pattern, f"%0{padding}d", path)
        elif first:
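A self-contained sketch of the rewrite this hunk performs (the `first` branch is truncated above, so its body is inferred from the first=True call in get_media_pool_item earlier in this diff):

    import re

    def reformat(path, padded=False, first=False):
        # "plate.[0001-1008].exr" style sequence markers only
        if "[" not in path:
            return path
        padding = len(re.findall(r"(\d+)(?=-)", path).pop())
        if padded:
            return re.sub(r"(\[\d+\-\d+\])", "%0{}d".format(padding), path)
        if first:
            frame = re.compile(r"\[(\d+)\-\d+\]").findall(path).pop()
            return re.sub(r"(\[\d+\-\d+\])", frame, path)
        return path

    reformat("plate.[0001-1008].exr", padded=True)  # plate.%04d.exr
    reformat("plate.[0001-1008].exr", first=True)   # plate.0001.exr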
@@ -422,7 +422,6 @@ class ClipLoader:
         media_pool_item = lib.create_media_pool_item(
             self.data["path"], self.active_bin)
         _clip_property = media_pool_item.GetClipProperty
         clip_name = _clip_property("File Name")
-
         # get handles
         handle_start = self.data["versionData"].get("handleStart")
@@ -784,6 +783,8 @@ class PublishClip:
         # add review track only to hero track
         if hero_track and self.review_layer:
             self.tag_data.update({"reviewTrack": self.review_layer})
+        else:
+            self.tag_data.update({"reviewTrack": None})
 
 
     def _solve_tag_hierarchy_data(self, hierarchy_formating_data):
@@ -820,7 +821,7 @@ class PublishClip:
 
     def _create_parents(self):
         """ Create parents and return it in list. """
-        self.parents = list()
+        self.parents = []
 
         patern = re.compile(self.parents_search_patern)
         par_split = [patern.findall(t).pop()
@@ -37,8 +37,16 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                 continue
 
             media_pool_item = timeline_item.GetMediaPoolItem()
-            clip_property = media_pool_item.GetClipProperty()
-            self.log.debug(f"clip_property: {clip_property}")
+            source_duration = int(media_pool_item.GetClipProperty("Frames"))
+
+            # solve handles length
+            handle_start = min(
+                tag_data["handleStart"], int(timeline_item.GetLeftOffset()))
+            handle_end = min(
+                tag_data["handleEnd"], int(
+                    source_duration - timeline_item.GetRightOffset()))
+
+            self.log.debug("Handles: <{}, {}>".format(handle_start, handle_end))
 
             # add tag data to instance data
             data.update({
@@ -60,7 +68,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                 "item": timeline_item,
                 "families": families,
                 "publish": resolve.get_publish_attribute(timeline_item),
-                "fps": context.data["fps"]
+                "fps": context.data["fps"],
+                "handleStart": handle_start,
+                "handleEnd": handle_end
             })
 
             # otio clip data
@@ -26,7 +26,8 @@ from .terminal import Terminal
 from .execute import (
     get_pype_execute_args,
     execute,
-    run_subprocess
+    run_subprocess,
+    CREATE_NO_WINDOW
 )
 from .log import PypeLogger, timeit
 from .mongo import (
@@ -112,6 +113,7 @@ from .profiles_filtering import filter_profiles
 from .plugin_tools import (
     TaskNotSetError,
     get_subset_name,
+    prepare_template_data,
     filter_pyblish_plugins,
     set_plugin_attributes_from_settings,
     source_hash,
@@ -137,6 +139,7 @@ from .editorial import (
     trim_media_range,
     range_from_frames,
     frames_to_secons,
+    frames_to_timecode,
     make_sequence_collection
 )
@@ -244,5 +247,6 @@ __all__ = [
     "trim_media_range",
     "range_from_frames",
     "frames_to_secons",
+    "frames_to_timecode",
     "make_sequence_collection"
 ]
@@ -137,6 +137,11 @@ def frames_to_secons(frames, framerate):
     return _ot.to_seconds(rt)
 
 
+def frames_to_timecode(frames, framerate):
+    rt = _ot.from_frames(frames, framerate)
+    return _ot.to_timecode(rt)
+
+
 def make_sequence_collection(path, otio_range, metadata):
     """
     Make collection from path otio range and otio metadata.
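Usage sketch for the new frames_to_timecode() helper, assuming `_ot` aliases opentimelineio.opentime as the surrounding calls suggest:

    import opentimelineio.opentime as _ot

    rt = _ot.from_frames(100, 24.0)
    _ot.to_timecode(rt)  # "00:00:04:04" (4 seconds and 4 frames at 24 fps)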
@@ -6,6 +6,9 @@ from .log import PypeLogger as Logger
 
 log = logging.getLogger(__name__)
 
+# MSDN process creation flag (Windows only)
+CREATE_NO_WINDOW = 0x08000000
+
 
 def execute(args,
             silent=False,
@@ -73,6 +73,23 @@ def get_subset_name(
         ("family", family),
         ("task", task_name)
     )
+    return template.format(**prepare_template_data(fill_pairs))
+
+
+def prepare_template_data(fill_pairs):
+    """
+    Prepares formatted data for filling template.
+
+    It produces multiple variants of keys (key, Key, KEY) to control
+    format of filled template.
+
+    Args:
+        fill_pairs (iterable) of tuples (key, value)
+    Returns:
+        (dict)
+        ('host', 'maya') > {'host':'maya', 'Host': 'Maya', 'HOST': 'MAYA'}
+
+    """
     fill_data = {}
     for key, value in fill_pairs:
         # Handle cases when value is `None` (standalone publisher)
@@ -94,7 +111,7 @@ def get_subset_name(
             capitalized += value[1:]
         fill_data[key.capitalize()] = capitalized
 
-    return template.format(**fill_data)
+    return fill_data
 
 
 def filter_pyblish_plugins(plugins):
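Usage sketch for the newly extracted prepare_template_data(), following the docstring's own ('host', 'maya') example (values hypothetical):

    fill = prepare_template_data([("family", "render"), ("task", "comp")])
    # {'family': 'render', 'Family': 'Render', 'FAMILY': 'RENDER',
    #  'task': 'comp', 'Task': 'Comp', 'TASK': 'COMP'}
    "{family}Main_{Task}".format(**fill)  # "renderMain_Comp"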
@@ -177,7 +194,7 @@ def set_plugin_attributes_from_settings(
     # map plugin superclass to preset json. Currently supported are load and
     # create (avalon.api.Loader and avalon.api.Creator)
     plugin_type = None
-    if superclass.__name__.split(".")[-1] == "Loader":
+    if superclass.__name__.split(".")[-1] in ("Loader", "SubsetLoader"):
         plugin_type = "load"
     elif superclass.__name__.split(".")[-1] == "Creator":
         plugin_type = "create"
@@ -15,12 +15,17 @@ def default_custom_attributes_definition():
 
 
 def app_definitions_from_app_manager(app_manager):
-    app_definitions = []
+    _app_definitions = []
     for app_name, app in app_manager.applications.items():
         if app.enabled and app.is_host:
-            app_definitions.append({
-                app_name: app.full_label
-            })
+            _app_definitions.append(
+                (app_name, app.full_label)
+            )
+
+    # Sort items by label
+    app_definitions = []
+    for key, label in sorted(_app_definitions, key=lambda item: item[1]):
+        app_definitions.append({key: label})
 
     if not app_definitions:
         app_definitions.append({"empty": "< Empty >"})
@@ -28,11 +33,16 @@ def app_definitions_from_app_manager(app_manager):
 
 
 def tool_definitions_from_app_manager(app_manager):
-    tools_data = []
+    _tools_data = []
     for tool_name, tool in app_manager.tools.items():
-        tools_data.append({
-            tool_name: tool.label
-        })
+        _tools_data.append(
+            (tool_name, tool.label)
+        )
+
+    # Sort items by label
+    tools_data = []
+    for key, label in sorted(_tools_data, key=lambda item: item[1]):
+        tools_data.append({key: label})
 
     # Make sure there is at least one item
     if not tools_data:
@@ -14,10 +14,15 @@ import avalon.pipeline
 from openpype.api import Anatomy
 
 
-class DeleteOldVersions(api.Loader):
+class DeleteOldVersions(api.SubsetLoader):
     """Deletes specific number of old version"""
 
+    is_multiple_contexts_compatible = True
+    sequence_splitter = "__sequence_splitter__"
+
     representations = ["*"]
     families = ["*"]
     tool_names = ["library_loader"]
 
     label = "Delete Old Versions"
     order = 35
@@ -259,9 +264,11 @@ class DeleteOldVersions(api.Loader):
         )
 
         if not version_ids:
-            msg = "Skipping processing. Nothing to delete."
+            msg = "Skipping processing. Nothing to delete on {}/{}".format(
+                asset["name"], subset["name"]
+            )
             self.log.info(msg)
-            self.message(msg)
+            print(msg)
             return
 
         repres = list(self.dbcon.find({
@@ -397,25 +404,30 @@ class DeleteOldVersions(api.Loader):
             self.log.error(msg)
             self.message(msg)
 
-        msg = "Total size of files: " + self.sizeof_fmt(size)
-        self.log.info(msg)
-        self.message(msg)
+        return size
 
-    def load(self, context, name=None, namespace=None, options=None):
+    def load(self, contexts, name=None, namespace=None, options=None):
         try:
-            versions_to_keep = 2
-            remove_publish_folder = False
-            if options:
-                versions_to_keep = options.get(
-                    "versions_to_keep", versions_to_keep
-                )
-                remove_publish_folder = options.get(
-                    "remove_publish_folder", remove_publish_folder
-                )
+            size = 0
+            for count, context in enumerate(contexts):
+                versions_to_keep = 2
+                remove_publish_folder = False
+                if options:
+                    versions_to_keep = options.get(
+                        "versions_to_keep", versions_to_keep
+                    )
+                    remove_publish_folder = options.get(
+                        "remove_publish_folder", remove_publish_folder
+                    )
 
-            data = self.get_data(context, versions_to_keep)
+                data = self.get_data(context, versions_to_keep)
 
-            self.main(data, remove_publish_folder)
+                size += self.main(data, remove_publish_folder)
+                print("Progressing {}/{}".format(count + 1, len(contexts)))
+
+            msg = "Total size of files: " + self.sizeof_fmt(size)
+            self.log.info(msg)
+            self.message(msg)
 
         except Exception:
             self.log.error("Failed to delete versions.", exc_info=True)
@@ -425,6 +437,7 @@ class CalculateOldVersions(DeleteOldVersions):
     """Calculate file size of old versions"""
     label = "Calculate Old Versions"
     order = 30
+    tool_names = ["library_loader"]
 
     options = [
         qargparse.Integer(
@@ -438,6 +451,9 @@ class CalculateOldVersions(DeleteOldVersions):
     def main(self, data, remove_publish_folder):
         size = 0
 
+        if not data:
+            return size
+
         if remove_publish_folder:
             size = self.delete_whole_dir_paths(
                 data["dir_paths"].values(), delete=False
@@ -447,6 +463,4 @@ class CalculateOldVersions(DeleteOldVersions):
             data["dir_paths"], data["file_paths_by_dir"], delete=False
         )
 
-        msg = "Total size of files: " + self.sizeof_fmt(size)
-        self.log.info(msg)
-        self.message(msg)
+        return size
@@ -4,7 +4,7 @@ Requires:
     context -> otioTimeline
 
 Optional:
-    otioClip.metadata -> masterLayer
+    instance -> reviewTrack
 
 Provides:
     instance -> otioReviewClips
@@ -26,12 +26,12 @@ class CollectOcioReview(pyblish.api.InstancePlugin):
 
     def process(self, instance):
         # get basic variables
-        otio_review_clips = list()
+        otio_review_clips = []
         otio_timeline = instance.context.data["otioTimeline"]
         otio_clip = instance.data["otioClip"]
 
         # optionally get `reviewTrack`
-        review_track_name = otio_clip.metadata.get("reviewTrack")
+        review_track_name = instance.data.get("reviewTrack")
 
         # generate range in parent
         otio_tl_range = otio_clip.range_in_parent()
@@ -22,6 +22,10 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin):
     hosts = ["resolve", "hiero"]
 
     def process(self, instance):
+
+        if "audio" in instance.data["family"]:
+            return
+
         if not instance.data.get("representations"):
             instance.data["representations"] = list()
         version_data = dict()
@@ -17,7 +17,8 @@ from openpype.lib import (
     get_pype_execute_args,
     should_decompress,
     get_decompress_dir,
-    decompress
+    decompress,
+    CREATE_NO_WINDOW
 )
@@ -253,9 +254,7 @@ class ExtractBurnin(openpype.api.Extractor):
             "env": {}
         }
         if platform.system().lower() == "windows":
-            process_kwargs["creationflags"] = (
-                subprocess.CREATE_NO_WINDOW
-            )
+            process_kwargs["creationflags"] = CREATE_NO_WINDOW
 
         openpype.api.run_subprocess(args, **process_kwargs)
         # Remove the temporary json
openpype/plugins/publish/extract_otio_audio_tracks.py (new file, +295 lines)
@@ -0,0 +1,295 @@
import os
import pyblish
import openpype.api
from openpype.lib import (
    get_ffmpeg_tool_path
)
import tempfile
import opentimelineio as otio


class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
    """Extract Audio tracks from OTIO timeline.

    Process will first merge all found audio tracks into one long .wav
    file, then trim it into individual short audio files relative to
    asset length and add them to each marked instance's representations.
    This is influenced by the instance data audio attribute."""

    order = pyblish.api.ExtractorOrder - 0.44
    label = "Extract OTIO Audio Tracks"
    hosts = ["hiero", "resolve"]

    # FFmpeg tools paths
    ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")

    def process(self, context):
        """Convert otio audio track's content to audio representations

        Args:
            context (pyblish.Context): context of publisher
        """
        # split the long audio file into pieces divided by instances
        audio_instances = self.get_audio_instances(context)
        self.log.debug("Audio instances: {}".format(len(audio_instances)))

        if len(audio_instances) < 1:
            self.log.info("No audio instances available")
            return

        # get sequence
        otio_timeline = context.data["otioTimeline"]

        # temp file
        audio_temp_fpath = self.create_temp_file("audio")

        # get all audio inputs from otio timeline
        audio_inputs = self.get_audio_track_items(otio_timeline)

        # create empty audio with longest duration
        empty = self.create_empty(audio_inputs)

        # add empty to list of audio inputs
        audio_inputs.insert(0, empty)

        # create cmd
        cmd = self.ffmpeg_path + " "
        cmd += self.create_cmd(audio_inputs)
        cmd += audio_temp_fpath

        # run subprocess
        self.log.debug("Executing: {}".format(cmd))
        openpype.api.run_subprocess(
            cmd, shell=True, logger=self.log
        )

        # remove empty
        os.remove(empty["mediaPath"])

        # cut instance framerange and add to representations
        self.add_audio_to_instances(audio_temp_fpath, audio_instances)

        # remove full mixed audio file
        os.remove(audio_temp_fpath)

    def add_audio_to_instances(self, audio_file, instances):
        created_files = []
        for inst in instances:
            name = inst.data["asset"]

            recycling_file = [f for f in created_files if name in f]

            # frameranges
            timeline_in_h = inst.data["clipInH"]
            timeline_out_h = inst.data["clipOutH"]
            fps = inst.data["fps"]

            # create duration
            duration = (timeline_out_h - timeline_in_h) + 1

            # ffmpeg generates a new file only if it doesn't exist already
            if not recycling_file:
                # convert to seconds
                start_sec = float(timeline_in_h / fps)
                duration_sec = float(duration / fps)

                # temp audio file
                audio_fpath = self.create_temp_file(name)

                cmd = " ".join([
                    self.ffmpeg_path,
                    "-ss {}".format(start_sec),
                    "-t {}".format(duration_sec),
                    "-i {}".format(audio_file),
                    audio_fpath
                ])

                # run subprocess
                self.log.debug("Executing: {}".format(cmd))
                openpype.api.run_subprocess(
                    cmd, shell=True, logger=self.log
                )
            else:
                audio_fpath = recycling_file.pop()

            if "audio" in (inst.data["families"] + [inst.data["family"]]):
                # create empty representation attr
                if "representations" not in inst.data:
                    inst.data["representations"] = []
                # add to representations
                inst.data["representations"].append({
                    "files": os.path.basename(audio_fpath),
                    "name": "wav",
                    "ext": "wav",
                    "stagingDir": os.path.dirname(audio_fpath),
                    "frameStart": 0,
                    "frameEnd": duration
                })

            elif "reviewAudio" in inst.data.keys():
                audio_attr = inst.data.get("audio") or []
                audio_attr.append({
                    "filename": audio_fpath,
                    "offset": 0
                })
                inst.data["audio"] = audio_attr

            # add generated audio file to created files for recycling
            if audio_fpath not in created_files:
                created_files.append(audio_fpath)

    def get_audio_instances(self, context):
        """Return only instances which have audio in families

        Args:
            context (pyblish.context): context of publisher

        Returns:
            list: list of selected instances
        """
        return [
            _i for _i in context
            # filter only those with audio family
            # and also with reviewAudio data key
            if bool("audio" in (
                _i.data.get("families", []) + [_i.data["family"]])
            ) or _i.data.get("reviewAudio")
        ]

    def get_audio_track_items(self, otio_timeline):
        """Get all audio clips from OTIO audio tracks

        Args:
            otio_timeline (otio.schema.timeline): timeline object

        Returns:
            list: list of audio clip dictionaries
        """
        output = []
        # go through all audio tracks
        for otio_track in otio_timeline.tracks:
            if "Audio" not in otio_track.kind:
                continue
            self.log.debug("_" * 50)
            playhead = 0
            for otio_clip in otio_track:
                self.log.debug(otio_clip)
                if isinstance(otio_clip, otio.schema.Gap):
                    playhead += otio_clip.source_range.duration.value
                elif isinstance(otio_clip, otio.schema.Clip):
                    start = otio_clip.source_range.start_time.value
                    duration = otio_clip.source_range.duration.value
                    fps = otio_clip.source_range.start_time.rate
                    media_path = otio_clip.media_reference.target_url
                    input = {
                        "mediaPath": media_path,
                        "delayFrame": playhead,
                        "startFrame": start,
                        "durationFrame": duration,
                        "delayMilSec": int(float(playhead / fps) * 1000),
                        "startSec": float(start / fps),
                        "durationSec": float(duration / fps),
                        "fps": fps
                    }
                    if input not in output:
                        output.append(input)
                        self.log.debug("__ input: {}".format(input))
                    playhead += otio_clip.source_range.duration.value

        return output

    def create_empty(self, inputs):
        """Create an empty audio file used as duration placeholder

        Args:
            inputs (list): list of audio clip dictionaries

        Returns:
            dict: audio clip dictionary
        """
        # temp file
        empty_fpath = self.create_temp_file("empty")

        # get all end frames
        end_secs = [(_i["delayFrame"] + _i["durationFrame"]) / _i["fps"]
                    for _i in inputs]
        # get the max of end frames
        max_duration_sec = max(end_secs)

        # create empty cmd
        cmd = " ".join([
            self.ffmpeg_path,
            "-f lavfi",
            "-i anullsrc=channel_layout=stereo:sample_rate=48000",
            "-t {}".format(max_duration_sec),
            empty_fpath
        ])

        # generate empty with ffmpeg
        # run subprocess
        self.log.debug("Executing: {}".format(cmd))

        openpype.api.run_subprocess(
            cmd, shell=True, logger=self.log
        )

        # return dict with output
        return {
            "mediaPath": empty_fpath,
            "delayMilSec": 0,
            "startSec": 0.00,
            "durationSec": max_duration_sec
        }

    def create_cmd(self, inputs):
        """Creating multiple input cmd string

        Args:
            inputs (list): list of input dicts. Order matters.

        Returns:
            str: the command body

        """
        # create cmd segments
        _inputs = ""
        _filters = "-filter_complex \""
        _channels = ""
        for index, input in enumerate(inputs):
            input_format = input.copy()
            input_format.update({"i": index})
            _inputs += (
                "-ss {startSec} "
                "-t {durationSec} "
                "-i \"{mediaPath}\" "
            ).format(**input_format)

            _filters += "[{i}]adelay={delayMilSec}:all=1[r{i}]; ".format(
                **input_format)
            _channels += "[r{}]".format(index)

        # merge all cmd segments together
        cmd = _inputs + _filters + _channels
        cmd += str(
            "amix=inputs={inputs}:duration=first:"
            "dropout_transition=1000,volume={inputs}[a]\" "
        ).format(inputs=len(inputs))
        cmd += "-map \"[a]\" "

        return cmd

    def create_temp_file(self, name):
        """Create temp wav file

        Args:
            name (str): name to be used in file name

        Returns:
            str: temp fpath
        """
        return os.path.normpath(
            tempfile.mktemp(
                prefix="pyblish_tmp_{}_".format(name),
                suffix=".wav"
            )
        )
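To make the string assembly in create_cmd() concrete: for an empty placeholder plus one clip delayed by 4000 ms, the plugin would end up running a command of roughly this shape (paths and numbers hypothetical, line breaks added for readability):

    # ffmpeg -ss 0.0 -t 10.0 -i "/tmp/pyblish_tmp_empty_.wav" \
    #        -ss 2.0 -t 3.0 -i "/path/clip1.wav" \
    #        -filter_complex "[0]adelay=0:all=1[r0]; [1]adelay=4000:all=1[r1]; \
    #        [r0][r1]amix=inputs=2:duration=first:dropout_transition=1000,volume=2[a]" \
    #        -map "[a]" /tmp/pyblish_tmp_audio_.wav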
@@ -12,10 +12,13 @@ import shutil
 from pymongo import DeleteOne, InsertOne
 import pyblish.api
 from avalon import io
+from avalon.api import format_template_with_optional_keys
 from avalon.vendor import filelink
 import openpype.api
 from datetime import datetime
 # from pype.modules import ModulesManager
+from openpype.lib.profiles_filtering import filter_profiles
+from openpype.lib import prepare_template_data
 
 # this is needed until speedcopy for linux is fixed
 if sys.platform == "win32":
@@ -294,7 +297,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         else:
             orig_transfers = list(instance.data['transfers'])
 
-        template_name = self.template_name_from_instance(instance)
+        task_name = io.Session.get("AVALON_TASK")
+        family = self.main_family_from_instance(instance)
+
+        key_values = {"families": family, "tasks": task_name}
+        profile = filter_profiles(self.template_name_profiles, key_values,
+                                  logger=self.log)
+        if profile:
+            template_name = profile["template_name"]
 
         published_representations = {}
         for idx, repre in enumerate(instance.data["representations"]):
@@ -697,14 +707,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         subset = io.find_one({"_id": _id})
 
-        # add group if available
-        if instance.data.get("subsetGroup"):
-            io.update_many({
-                'type': 'subset',
-                '_id': io.ObjectId(subset["_id"])
-            }, {'$set': {'data.subsetGroup':
-                instance.data.get('subsetGroup')}}
-            )
+        self._set_subset_group(instance, subset["_id"])
 
         # Update families on subset.
         families = [instance.data["family"]]
@@ -716,6 +719,65 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         return subset
 
+    def _set_subset_group(self, instance, subset_id):
+        """
+        Mark subset as belonging to group in DB.
+
+        Uses Settings > Global > Publish plugins > IntegrateAssetNew
+
+        Args:
+            instance (dict): processed instance
+            subset_id (str): DB's subset _id
+
+        """
+        # add group if available
+        integrate_new_sett = (instance.context.data["project_settings"]
+                              ["global"]
+                              ["publish"]
+                              ["IntegrateAssetNew"])
+
+        profiles = integrate_new_sett["subset_grouping_profiles"]
+
+        filtering_criteria = {
+            "families": instance.data["family"],
+            "hosts": instance.data["anatomyData"]["app"],
+            "tasks": instance.data["anatomyData"]["task"] or
+                     io.Session["AVALON_TASK"]
+        }
+        matching_profile = filter_profiles(profiles, filtering_criteria)
+
+        filled_template = None
+        if matching_profile:
+            template = matching_profile["template"]
+            fill_pairs = (
+                ("family", filtering_criteria["families"]),
+                ("task", filtering_criteria["tasks"]),
+                ("host", filtering_criteria["hosts"]),
+                ("subset", instance.data["subset"]),
+                ("renderlayer", instance.data.get("renderlayer"))
+            )
+            fill_pairs = prepare_template_data(fill_pairs)
+
+            try:
+                filled_template = \
+                    format_template_with_optional_keys(fill_pairs, template)
+            except KeyError:
+                keys = []
+                if fill_pairs:
+                    keys = fill_pairs.keys()
+
+                msg = "Subset grouping failed. " \
+                      "Only {} are expected in Settings".format(','.join(keys))
+                self.log.warning(msg)
+
+        if instance.data.get("subsetGroup") or filled_template:
+            subset_group = instance.data.get('subsetGroup') or filled_template
+
+            io.update_many({
+                'type': 'subset',
+                '_id': io.ObjectId(subset_id)
+            }, {'$set': {'data.subsetGroup': subset_group}})
+
     def create_version(self, subset, version_number, data=None):
         """ Copy given source to destination
@@ -798,68 +860,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             family = instance.data["families"][0]
         return family
 
-    def template_name_from_instance(self, instance):
-        template_name = self.default_template_name
-        if not self.template_name_profiles:
-            self.log.debug((
-                "Template name profiles are not set."
-                " Using default \"{}\""
-            ).format(template_name))
-            return template_name
-
-        # Task name from session?
-        task_name = io.Session.get("AVALON_TASK")
-        family = self.main_family_from_instance(instance)
-
-        matching_profiles = {}
-        highest_value = -1
-        self.log.debug(
-            "Template name profiles:\n{}".format(self.template_name_profiles)
-        )
-        for name, filters in self.template_name_profiles.items():
-            value = 0
-            families = filters.get("families")
-            if families:
-                if family not in families:
-                    continue
-                value += 1
-
-            tasks = filters.get("tasks")
-            if tasks:
-                if task_name not in tasks:
-                    continue
-                value += 1
-
-            if value > highest_value:
-                matching_profiles = {}
-                highest_value = value
-
-            if value == highest_value:
-                matching_profiles[name] = filters
-
-        if len(matching_profiles) == 1:
-            template_name = tuple(matching_profiles.keys())[0]
-            self.log.debug(
-                "Using template name \"{}\".".format(template_name)
-            )
-
-        elif len(matching_profiles) > 1:
-            template_name = tuple(matching_profiles.keys())[0]
-            self.log.warning((
-                "More than one template profiles matched"
-                " Family \"{}\" and Task: \"{}\"."
-                " Using first template name in row \"{}\"."
-            ).format(family, task_name, template_name))
-
-        else:
-            self.log.debug((
-                "None of template profiles matched"
-                " Family \"{}\" and Task: \"{}\"."
-                " Using default template name \"{}\""
-            ).format(family, task_name, template_name))
-
-        return template_name
-
     def get_rootless_path(self, anatomy, path):
         """ Returns, if possible, path without absolute portion from host
         (eg. 'c:\' or '/opt/..')
@ -69,6 +69,87 @@ def get_fps(str_value):
    return str(fps)


def _prores_codec_args(ffprobe_data):
    output = []

    tags = ffprobe_data.get("tags") or {}
    encoder = tags.get("encoder") or ""
    if encoder.endswith("prores_ks"):
        codec_name = "prores_ks"

    elif encoder.endswith("prores_aw"):
        codec_name = "prores_aw"

    else:
        codec_name = "prores"

    output.extend(["-codec:v", codec_name])

    pix_fmt = ffprobe_data.get("pix_fmt")
    if pix_fmt:
        output.extend(["-pix_fmt", pix_fmt])

    # Rest of the arguments is prores_ks specific
    if codec_name == "prores_ks":
        codec_tag_to_profile_map = {
            "apco": "proxy",
            "apcs": "lt",
            "apcn": "standard",
            "apch": "hq",
            "ap4h": "4444",
            "ap4x": "4444xq"
        }
        codec_tag_str = ffprobe_data.get("codec_tag_string")
        if codec_tag_str:
            profile = codec_tag_to_profile_map.get(codec_tag_str)
            if profile:
                output.extend(["-profile:v", profile])

    return output


def _h264_codec_args(ffprobe_data):
    output = []

    output.extend(["-codec:v", "h264"])

    pix_fmt = ffprobe_data.get("pix_fmt")
    if pix_fmt:
        output.extend(["-pix_fmt", pix_fmt])

    output.extend(["-intra"])
    output.extend(["-g", "1"])

    return output


def get_codec_args(ffprobe_data):
    codec_name = ffprobe_data.get("codec_name")
    # Codec "prores"
    if codec_name == "prores":
        return _prores_codec_args(ffprobe_data)

    # Codec "h264"
    if codec_name == "h264":
        return _h264_codec_args(ffprobe_data)

    output = []
    if codec_name:
        output.extend(["-codec:v", codec_name])

    bit_rate = ffprobe_data.get("bit_rate")
    if bit_rate:
        output.extend(["-b:v", bit_rate])

    pix_fmt = ffprobe_data.get("pix_fmt")
    if pix_fmt:
        output.extend(["-pix_fmt", pix_fmt])

    output.extend(["-g", "1"])

    return output
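
A small usage sketch for get_codec_args(). The stream dictionary below only illustrates the shape of data ffprobe returns for one video stream; the values are made up:

ffprobe_stream = {
    "codec_name": "prores",
    "codec_tag_string": "apch",
    "pix_fmt": "yuv422p10le",
    "tags": {"encoder": "Lavc prores_ks"}
}
args = get_codec_args(ffprobe_stream)
# ["-codec:v", "prores_ks", "-pix_fmt", "yuv422p10le", "-profile:v", "hq"]
print(" ".join(args))
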
class ModifiedBurnins(ffmpeg_burnins.Burnins):
    '''
    This is a modification of the OTIO FFmpeg Burnin adapter.

@ -558,38 +639,13 @@ def burnins_from_data(
    if codec_data:
        # Use codec definition from method arguments
        ffmpeg_args = codec_data
        ffmpeg_args.append("-g 1")

    else:
        ffprobe_data = burnin._streams[0]
        codec_name = ffprobe_data.get("codec_name")
        if codec_name:
            if codec_name == "prores":
                tags = ffprobe_data.get("tags") or {}
                encoder = tags.get("encoder") or ""
                if encoder.endswith("prores_ks"):
                    codec_name = "prores_ks"

                elif encoder.endswith("prores_aw"):
                    codec_name = "prores_aw"
            ffmpeg_args.append("-codec:v {}".format(codec_name))

        profile_name = ffprobe_data.get("profile")
        if profile_name:
            # lower the profile name and replace spaces with underscores
            profile_name = profile_name.replace(" ", "_").lower()
            ffmpeg_args.append("-profile:v {}".format(profile_name))

        bit_rate = ffprobe_data.get("bit_rate")
        if bit_rate:
            ffmpeg_args.append("-b:v {}".format(bit_rate))

        pix_fmt = ffprobe_data.get("pix_fmt")
        if pix_fmt:
            ffmpeg_args.append("-pix_fmt {}".format(pix_fmt))
        ffmpeg_args.extend(get_codec_args(ffprobe_data))

    # Use group of one (same as the `-intra` argument, which is deprecated)
    ffmpeg_args.append("-g 1")

    ffmpeg_args_str = " ".join(ffmpeg_args)
    burnin.render(
        output_path, args=ffmpeg_args_str, overwrite=overwrite, **data
@ -11,7 +11,7 @@
        "path": "{@folder}/{@file}"
    },
    "render": {
        "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/render/{subset}/{@version}",
        "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}",
        "file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}",
        "path": "{@folder}/{@file}"
    },
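
The templates above rely on nested-key formatting. As a rough illustration of how the plain tokens resolve with str.format, using made-up data and leaving aside OpenPype's handling of {@version} aliases and <optional> parts:

template = (
    "{root[work]}/{project[name]}/{hierarchy}/{asset}"
    "/publish/{family}/{subset}/v{version:0>3}"
)
data = {
    "root": {"work": "/mnt/work"},
    "project": {"name": "Demo"},
    "hierarchy": "shots/sq01",
    "asset": "sh010",
    "family": "render",
    "subset": "renderMain",
    "version": 5
}
print(template.format(**data))
# /mnt/work/Demo/shots/sq01/sh010/publish/render/renderMain/v005
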
@ -1,13 +1,24 @@
{
    "publish": {
        "IntegrateHeroVersion": {
            "enabled": true
            "enabled": true,
            "optional": true,
            "families": [
                "model",
                "rig",
                "look",
                "pointcache",
                "animation",
                "setdress",
                "layout",
                "mayaAscii"
            ]
        },
        "ExtractJpegEXR": {
            "enabled": true,
            "ffmpeg_args": {
                "input": [
                    "-gamma 2.2"
                    "-apply_trc gamma22"
                ],
                "output": []
            }
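
The input-argument change swaps a raw gamma adjustment for the EXR decoder's transfer-characteristics option. As a rough sketch of where such input arguments sit in an ffmpeg call (binary and paths are placeholders, not the plugin's actual command assembly):

import subprocess

cmd = [
    "ffmpeg",
    "-apply_trc", "gamma22",  # input argument from the setting above
    "-i", "input.0001.exr",   # placeholder input
    "output.jpg"              # placeholder output
]
subprocess.run(cmd, check=True)
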
@ -116,19 +127,30 @@
            ]
        },
        "IntegrateAssetNew": {
            "template_name_profiles": {
                "publish": {
            "template_name_profiles": [
                {
                    "families": [],
                    "tasks": []
                    "tasks": [],
                    "template_name": "publish"
                },
                "render": {
                {
                    "families": [
                        "review",
                        "render",
                        "prerender"
                    ]
                    ],
                    "tasks": [],
                    "template_name": "render"
                }
            }
            ],
            "subset_grouping_profiles": [
                {
                    "families": [],
                    "hosts": [],
                    "tasks": [],
                    "template": ""
                }
            ]
        },
        "ProcessSubmittedJobOnFarm": {
            "enabled": true,
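
Since template_name_profiles changes shape from a name-keyed dict to a list of profiles with an explicit "template_name", existing overrides need converting. A hedged sketch of such a migration (the helper name is hypothetical):

def migrate_template_name_profiles(old_profiles):
    # {"publish": {"families": [], "tasks": []}}
    # -> [{"families": [], "tasks": [], "template_name": "publish"}]
    new_profiles = []
    for template_name, filters in old_profiles.items():
        profile = dict(filters)
        profile["template_name"] = template_name
        new_profiles.append(profile)
    return new_profiles
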
@ -32,7 +32,7 @@
        },
        "__dynamic_keys_labels__": {
            "3-2": "3.2",
            "3-1": "3.2"
            "3-1": "3.1"
        }
    }
},
@ -1,6 +1,5 @@
|
|||
import re
|
||||
import copy
|
||||
|
||||
from .lib import (
|
||||
NOT_SET,
|
||||
OverrideState
|
||||
|
|
@ -94,11 +93,18 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
for key in prev_keys:
|
||||
self.pop(key)
|
||||
|
||||
def _convert_to_valid_type(self, value):
|
||||
try:
|
||||
return dict(value)
|
||||
except Exception:
|
||||
pass
|
||||
return super(DictMutableKeysEntity, self)._convert_to_valid_type(value)
|
||||
|
||||
def set_key_value(self, key, value):
|
||||
# TODO Check for value type if is Settings entity?
|
||||
child_obj = self.children_by_key.get(key)
|
||||
if not child_obj:
|
||||
if not KEY_REGEX.match(key):
|
||||
if not self.store_as_list and not KEY_REGEX.match(key):
|
||||
raise InvalidKeySymbols(self.path, key)
|
||||
|
||||
child_obj = self.add_key(key)
|
||||
|
|
@ -112,7 +118,7 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
if new_key == old_key:
|
||||
return
|
||||
|
||||
if not KEY_REGEX.match(new_key):
|
||||
if not self.store_as_list and not KEY_REGEX.match(new_key):
|
||||
raise InvalidKeySymbols(self.path, new_key)
|
||||
|
||||
self.children_by_key[new_key] = self.children_by_key.pop(old_key)
|
||||
|
|
@ -125,11 +131,15 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
self._has_project_override = True
|
||||
self.on_change()
|
||||
|
||||
def _add_key(self, key):
|
||||
def _add_key(self, key, _ignore_key_validation=False):
|
||||
if key in self.children_by_key:
|
||||
self.pop(key)
|
||||
|
||||
if not KEY_REGEX.match(key):
|
||||
if (
|
||||
not _ignore_key_validation
|
||||
and not self.store_as_list
|
||||
and not KEY_REGEX.match(key)
|
||||
):
|
||||
raise InvalidKeySymbols(self.path, key)
|
||||
|
||||
if self.value_is_env_group:
|
||||
|
|
@ -194,6 +204,7 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
self.children_by_key = {}
|
||||
self.children_label_by_id = {}
|
||||
|
||||
self.store_as_list = self.schema_data.get("store_as_list") or False
|
||||
self.value_is_env_group = (
|
||||
self.schema_data.get("value_is_env_group") or False
|
||||
)
|
||||
|
|
@ -237,6 +248,10 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
if used_temp_label:
|
||||
self.label = None
|
||||
|
||||
if self.value_is_env_group and self.store_as_list:
|
||||
reason = "Item can't store environments metadata to list output."
|
||||
raise EntitySchemaError(self, reason)
|
||||
|
||||
if not self.schema_data.get("object_type"):
|
||||
reason = (
|
||||
"Modifiable dictionary must have specified `object_type`."
|
||||
|
|
@ -332,6 +347,7 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
|
||||
using_project_overrides = False
|
||||
using_studio_overrides = False
|
||||
using_default_values = False
|
||||
if (
|
||||
state is OverrideState.PROJECT
|
||||
and self.had_project_override
|
||||
|
|
@ -349,14 +365,28 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
metadata = self._studio_override_metadata
|
||||
|
||||
else:
|
||||
using_default_values = True
|
||||
value = self._default_value
|
||||
metadata = self._default_metadata
|
||||
|
||||
if value is NOT_SET:
|
||||
using_default_values = False
|
||||
value = self.value_on_not_set
|
||||
|
||||
using_values_from_state = False
|
||||
if state is OverrideState.PROJECT:
|
||||
using_values_from_state = using_project_overrides
|
||||
elif state is OverrideState.STUDIO:
|
||||
using_values_from_state = using_studio_overrides
|
||||
elif state is OverrideState.DEFAULTS:
|
||||
using_values_from_state = using_default_values
|
||||
|
||||
new_value = copy.deepcopy(value)
|
||||
|
||||
if using_values_from_state:
|
||||
initial_value = copy.deepcopy(value)
|
||||
initial_value.update(metadata)
|
||||
|
||||
# Simulate `clear` method without triggering value change
|
||||
for key in tuple(self.children_by_key.keys()):
|
||||
self.children_by_key.pop(key)
|
||||
|
|
@ -369,30 +399,51 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
children_label_by_id = {}
|
||||
metadata_labels = metadata.get(M_DYNAMIC_KEY_LABEL) or {}
|
||||
for _key, _value in new_value.items():
|
||||
if not KEY_REGEX.match(_key):
|
||||
label = metadata_labels.get(_key)
|
||||
if self.store_as_list or KEY_REGEX.match(_key):
|
||||
child_entity = self._add_key(_key)
|
||||
else:
|
||||
# Replace invalid characters with underscore
|
||||
# - a safety measure to avoid breaking already existing settings
|
||||
_key = re.sub(
|
||||
r"[^{}]+".format(KEY_ALLOWED_SYMBOLS),
|
||||
"_",
|
||||
_key
|
||||
)
|
||||
new_key = self._convert_to_regex_valid_key(_key)
|
||||
if not using_values_from_state:
|
||||
child_entity = self._add_key(new_key)
|
||||
else:
|
||||
child_entity = self._add_key(
|
||||
_key, _ignore_key_validation=True
|
||||
)
|
||||
self.change_key(_key, new_key)
|
||||
_key = new_key
|
||||
|
||||
if not label:
|
||||
label = metadata_labels.get(new_key)
|
||||
|
||||
child_entity = self._add_key(_key)
|
||||
child_entity.update_default_value(_value)
|
||||
if using_project_overrides:
|
||||
child_entity.update_project_value(_value)
|
||||
elif using_studio_overrides:
|
||||
child_entity.update_studio_value(_value)
|
||||
|
||||
label = metadata_labels.get(_key)
|
||||
if label:
|
||||
children_label_by_id[child_entity.id] = label
|
||||
child_entity.set_override_state(state)
|
||||
|
||||
self.children_label_by_id = children_label_by_id
|
||||
|
||||
self.initial_value = self.settings_value()
|
||||
_settings_value = self.settings_value()
|
||||
if using_values_from_state:
|
||||
if _settings_value is NOT_SET:
|
||||
initial_value = NOT_SET
|
||||
else:
|
||||
initial_value = _settings_value
|
||||
self.initial_value = initial_value
|
||||
|
||||
def _convert_to_regex_valid_key(self, key):
|
||||
return re.sub(
|
||||
r"[^{}]+".format(KEY_ALLOWED_SYMBOLS),
|
||||
"_",
|
||||
key
|
||||
)
|
||||
|
||||
def children_key_by_id(self):
|
||||
return {
|
||||
|
|
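
Assuming KEY_ALLOWED_SYMBOLS is a character-class body along the lines of "a-zA-Z0-9-_ ." (the real constant is defined elsewhere in the settings package), the substitution in _convert_to_regex_valid_key behaves like this:

import re

KEY_ALLOWED_SYMBOLS = "a-zA-Z0-9-_ ."  # assumed value, for illustration only
sanitized = re.sub(r"[^{}]+".format(KEY_ALLOWED_SYMBOLS), "_", "maya/2022 (beta)")
print(sanitized)  # maya_2022 _beta_
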
@ -402,6 +453,12 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
|
||||
@property
|
||||
def value(self):
|
||||
if self.store_as_list:
|
||||
output = []
|
||||
for key, child_entity in self.children_by_key.items():
|
||||
output.append([key, child_entity.value])
|
||||
return output
|
||||
|
||||
output = {}
|
||||
for key, child_entity in self.children_by_key.items():
|
||||
output[key] = child_entity.value
|
||||
|
|
@ -481,6 +538,13 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
return False
|
||||
|
||||
def _settings_value(self):
|
||||
if self.store_as_list:
|
||||
output = []
|
||||
for key, child_entity in self.children_by_key.items():
|
||||
child_value = child_entity.settings_value()
|
||||
output.append([key, child_value])
|
||||
return output
|
||||
|
||||
output = {}
|
||||
for key, child_entity in self.children_by_key.items():
|
||||
child_value = child_entity.settings_value()
|
||||
|
|
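
In other words, with store_as_list enabled the entity serializes its children as [key, value] pairs instead of a dict, which also sidesteps KEY_REGEX validation of the keys. A simplified illustration with plain data (no entity classes involved):

children = {"maya/2022": {"executable": "maya"}, "nuke": {"executable": "nuke"}}

# store_as_list = False: a dict, keys must satisfy KEY_REGEX
as_dict = dict(children)

# store_as_list = True: list of pairs, arbitrary keys survive
as_list = [[key, value] for key, value in children.items()]
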
@ -563,7 +627,8 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
|
||||
# Create new children
|
||||
for _key, _value in new_value.items():
|
||||
child_entity = self._add_key(_key)
|
||||
new_key = self._convert_to_regex_valid_key(_key)
|
||||
child_entity = self._add_key(new_key)
|
||||
child_entity.update_default_value(_value)
|
||||
label = metadata_labels.get(_key)
|
||||
if label:
|
||||
|
|
@ -608,7 +673,8 @@ class DictMutableKeysEntity(EndpointEntity):
|
|||
|
||||
# Create new children
|
||||
for _key, _value in new_value.items():
|
||||
child_entity = self._add_key(_key)
|
||||
new_key = self._convert_to_regex_valid_key(_key)
|
||||
child_entity = self._add_key(new_key)
|
||||
child_entity.update_default_value(_value)
|
||||
if self._has_studio_override:
|
||||
child_entity.update_studio_value(_value)
|
||||
|
|
|
|||
|
|
@ -61,39 +61,36 @@ class ListEntity(EndpointEntity):
|
|||
def append(self, item):
|
||||
child_obj = self.add_new_item(trigger_change=False)
|
||||
child_obj.set(item)
|
||||
self.on_change()
|
||||
self.on_child_change(child_obj)
|
||||
|
||||
def extend(self, items):
|
||||
for item in items:
|
||||
self.append(item)
|
||||
|
||||
def clear(self):
|
||||
self.children.clear()
|
||||
self.on_change()
|
||||
if not self.children:
|
||||
return
|
||||
|
||||
first_item = self.children.pop(0)
|
||||
while self.children:
|
||||
self.children.pop(0)
|
||||
self.on_child_change(first_item)
|
||||
|
||||
def pop(self, idx):
|
||||
item = self.children.pop(idx)
|
||||
self.on_change()
|
||||
self.on_child_change(item)
|
||||
return item
|
||||
|
||||
def remove(self, item):
|
||||
for idx, child_obj in enumerate(self.children):
|
||||
found = False
|
||||
if isinstance(item, BaseEntity):
|
||||
if child_obj is item:
|
||||
found = True
|
||||
elif child_obj.value == item:
|
||||
found = True
|
||||
|
||||
if found:
|
||||
self.pop(idx)
|
||||
return
|
||||
raise ValueError("ListEntity.remove(x): x not in ListEntity")
|
||||
try:
|
||||
self.pop(self.index(item))
|
||||
except ValueError:
|
||||
raise ValueError("ListEntity.remove(x): x not in ListEntity")
|
||||
|
||||
def insert(self, idx, item):
|
||||
child_obj = self.add_new_item(idx, trigger_change=False)
|
||||
child_obj.set(item)
|
||||
self.on_change()
|
||||
self.on_child_change(child_obj)
|
||||
|
||||
def _add_new_item(self, idx=None):
|
||||
child_obj = self.create_schema_object(self.item_schema, self, True)
|
||||
|
|
@ -106,13 +103,9 @@ class ListEntity(EndpointEntity):
|
|||
def add_new_item(self, idx=None, trigger_change=True):
|
||||
child_obj = self._add_new_item(idx)
|
||||
child_obj.set_override_state(self._override_state)
|
||||
if self._override_state is OverrideState.STUDIO:
|
||||
child_obj.add_to_studio_default([])
|
||||
elif self._override_state is OverrideState.PROJECT:
|
||||
child_obj.add_to_project_default([])
|
||||
|
||||
if trigger_change:
|
||||
self.on_change()
|
||||
self.on_child_change(child_obj)
|
||||
return child_obj
|
||||
|
||||
def swap_items(self, item_1, item_2):
|
||||
|
|
|
|||
|
|
@ -16,6 +16,17 @@
|
|||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "optional",
|
||||
"label": "Optional"
|
||||
},
|
||||
{
|
||||
"key": "families",
|
||||
"label": "Families",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
@ -420,9 +431,80 @@
|
|||
"is_group": true,
|
||||
"children": [
|
||||
{
|
||||
"type": "raw-json",
|
||||
"type": "list",
|
||||
"key": "template_name_profiles",
|
||||
"label": "template_name_profiles"
|
||||
"label": "Template name profiles",
|
||||
"use_label_wrap": true,
|
||||
"object_type": {
|
||||
"type": "dict",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": ""
|
||||
},
|
||||
{
|
||||
"key": "families",
|
||||
"label": "Families",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"type": "separator"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "template_name",
|
||||
"label": "Template name"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "list",
|
||||
"key": "subset_grouping_profiles",
|
||||
"label": "Subset grouping profiles",
|
||||
"use_label_wrap": true,
|
||||
"object_type": {
|
||||
"type": "dict",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Set all published instances as a part of specific group named according to 'Template'. <br>Implemented all variants of placeholders [{task},{family},{host},{subset},{renderlayer}]"
|
||||
},
|
||||
{
|
||||
"key": "families",
|
||||
"label": "Families",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"key": "hosts",
|
||||
"label": "Hosts",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"key": "tasks",
|
||||
"label": "Task names",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"type": "separator"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "template",
|
||||
"label": "Template"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -23,7 +23,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -23,7 +23,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -23,7 +23,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -19,7 +19,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -23,7 +23,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -23,7 +23,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",

@ -24,7 +24,6 @@
    "type": "dict-modifiable",
    "key": "variants",
    "collapsible_key": true,
    "dynamic_label": false,
    "use_label_wrap": false,
    "object_type": {
        "type": "dict",
@ -15,6 +15,8 @@ import maya.api.OpenMaya as om

from . import widgets
from . import commands
from .vray_proxies import vrayproxy_assign_look


module = sys.modules[__name__]
module.window = None

@ -211,9 +213,17 @@ class App(QtWidgets.QWidget):
                  subset_name,
                  asset))

        self.echo("Getting vray proxy nodes ...")
        vray_proxies = set(cmds.ls(type="VRayProxy"))
        nodes = set(item["nodes"]).difference(vray_proxies)

        # Assign look
        assign_look_by_version(nodes=item["nodes"],
                               version_id=version["_id"])
        if nodes:
            assign_look_by_version(list(nodes), version_id=version["_id"])

        if vray_proxies:
            for vp in vray_proxies:
                vrayproxy_assign_look(vp, subset_name)

        end = time.time()
@ -8,6 +8,9 @@ from openpype.hosts.maya.api import lib

from avalon import io, api


import vray_proxies

log = logging.getLogger(__name__)


@ -65,9 +68,7 @@ def get_selected_nodes():

    selection = cmds.ls(selection=True, long=True)
    hierarchy = list_descendents(selection)
    nodes = list(set(selection + hierarchy))

    return nodes
    return list(set(selection + hierarchy))


def get_all_asset_nodes():

@ -132,6 +133,21 @@ def create_items_from_nodes(nodes):
    asset_view_items = []

    id_hashes = create_asset_id_hash(nodes)

    # Get ids from Alembic files loaded by VRayProxy nodes
    vray_proxy_nodes = cmds.ls(nodes, type="VRayProxy")
    for vp in vray_proxy_nodes:
        path = cmds.getAttr("{}.fileName".format(vp))
        ids = vray_proxies.get_alembic_ids_cache(path)
        parent_id = {}
        for k in ids:
            pid = k.split(":")[0]
            if not parent_id.get(pid):
                parent_id.update({pid: [vp]})

        print("Adding ids from alembic {}".format(path))
        id_hashes.update(parent_id)

    if not id_hashes:
        return asset_view_items

@ -172,7 +188,7 @@ def remove_unused_looks():

    host = api.registered_host()

    unused = list()
    unused = []
    for container in host.ls():
        if container['loader'] == "LookLoader":
            members = cmds.sets(container['objectName'], query=True)
312
openpype/tools/mayalookassigner/vray_proxies.py
Normal file
|
|
@ -0,0 +1,312 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Tools for loading looks to vray proxies."""
|
||||
import os
|
||||
from collections import defaultdict
|
||||
import logging
|
||||
import json
|
||||
|
||||
import six
|
||||
|
||||
import alembic.Abc
|
||||
from maya import cmds
|
||||
|
||||
import avalon.io as io
|
||||
import avalon.maya
|
||||
import avalon.api as api
|
||||
|
||||
import openpype.hosts.maya.api.lib as lib
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_alembic_paths_by_property(filename, attr, verbose=False):
|
||||
# type: (str, str, bool) -> dict
|
||||
"""Return attribute value per objects in the Alembic file.
|
||||
|
||||
Reads an Alembic archive hierarchy and retrieves the
|
||||
value from the `attr` properties on the objects.
|
||||
|
||||
Args:
|
||||
filename (str): Full path to Alembic archive to read.
|
||||
attr (str): Id attribute.
|
||||
verbose (bool): Whether to verbosely log missing attributes.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of node full path with its id
|
||||
|
||||
"""
|
||||
# Normalize alembic path
|
||||
filename = os.path.normpath(filename)
|
||||
filename = filename.replace("\\", "/")
|
||||
filename = str(filename) # path must be string
|
||||
|
||||
archive = alembic.Abc.IArchive(filename)
|
||||
root = archive.getTop()
|
||||
|
||||
iterator = list(root.children)
|
||||
obj_ids = {}
|
||||
|
||||
for obj in iterator:
|
||||
name = obj.getFullName()
|
||||
|
||||
# include children for coming iterations
|
||||
iterator.extend(obj.children)
|
||||
|
||||
props = obj.getProperties()
|
||||
if props.getNumProperties() == 0:
|
||||
# Skip those without properties, e.g. '/materials' in a gpuCache
|
||||
continue
|
||||
|
||||
# The custom attribute is under the properties' first container under
|
||||
# the ".arbGeomParams"
|
||||
prop = props.getProperty(0) # get base property
|
||||
|
||||
_property = None
|
||||
try:
|
||||
geo_params = prop.getProperty('.arbGeomParams')
|
||||
_property = geo_params.getProperty(attr)
|
||||
except KeyError:
|
||||
if verbose:
|
||||
log.debug("Missing attr on: {0}".format(name))
|
||||
continue
|
||||
|
||||
if not _property.isConstant():
|
||||
log.warning("Id not constant on: {0}".format(name))
|
||||
|
||||
# Get first value sample
|
||||
value = _property.getValue()[0]
|
||||
|
||||
obj_ids[name] = value
|
||||
|
||||
return obj_ids
|
||||
|
||||
|
||||
def get_alembic_ids_cache(path):
|
||||
# type: (str) -> dict
|
||||
"""Build a id to node mapping in Alembic file.
|
||||
|
||||
Nodes without IDs are ignored.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of id to nodes in the Alembic.
|
||||
|
||||
"""
|
||||
node_ids = get_alembic_paths_by_property(path, attr="cbId")
|
||||
id_nodes = defaultdict(list)
|
||||
for node, _id in six.iteritems(node_ids):
|
||||
id_nodes[_id].append(node)
|
||||
|
||||
return dict(six.iteritems(id_nodes))
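
A hedged usage sketch; the cache path is a placeholder and the printed ids depend entirely on the cbId values stored in the file:

nodes_by_id = get_alembic_ids_cache("/path/to/pointcache.abc")
for node_id, node_paths in nodes_by_id.items():
    print(node_id, "->", node_paths)
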
|
||||
|
||||
|
||||
def assign_vrayproxy_shaders(vrayproxy, assignments):
|
||||
# type: (str, dict) -> None
|
||||
"""Assign shaders to content of Vray Proxy.
|
||||
|
||||
This will create shader overrides on Vray Proxy to assign shaders to its
|
||||
content.
|
||||
|
||||
Todo:
|
||||
Allow to optimize and assign a single shader to multiple shapes at
|
||||
once or maybe even set it to the highest available path?
|
||||
|
||||
Args:
|
||||
vrayproxy (str): Name of Vray Proxy
|
||||
assignments (dict): Mapping of shader assignments.
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
# Clear all current shader assignments
|
||||
plug = vrayproxy + ".shaders"
|
||||
num = cmds.getAttr(plug, size=True)
|
||||
for i in reversed(range(num)):
|
||||
cmds.removeMultiInstance("{}[{}]".format(plug, i), b=True)
|
||||
|
||||
# Create new assignment overrides
|
||||
index = 0
|
||||
for material, paths in assignments.items():
|
||||
for path in paths:
|
||||
plug = "{}.shaders[{}]".format(vrayproxy, index)
|
||||
cmds.setAttr(plug + ".shadersNames", path, type="string")
|
||||
cmds.connectAttr(material + ".outColor",
|
||||
plug + ".shadersConnections", force=True)
|
||||
index += 1
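
For example, with hypothetical material names and Alembic paths:

assignments = {
    "body_material": ["/char_hero/geo/body"],
    "head_material": ["/char_hero/geo/head"]
}
assign_vrayproxy_shaders("vrayProxyShape1", assignments)
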
|
||||
|
||||
|
||||
def get_look_relationships(version_id):
|
||||
# type: (str) -> dict
|
||||
"""Get relations for the look.
|
||||
|
||||
Args:
|
||||
version_id (str): Parent version Id.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary of relations.
|
||||
|
||||
"""
|
||||
json_representation = io.find_one({"type": "representation",
|
||||
"parent": version_id,
|
||||
"name": "json"})
|
||||
|
||||
# Load relationships
|
||||
shader_relation = api.get_representation_path(json_representation)
|
||||
with open(shader_relation, "r") as f:
|
||||
relationships = json.load(f)
|
||||
|
||||
return relationships
|
||||
|
||||
|
||||
def load_look(version_id):
|
||||
# type: (str) -> list
|
||||
"""Load look from version.
|
||||
|
||||
Get look from version and invoke Loader for it.
|
||||
|
||||
Args:
|
||||
version_id (str): Version ID
|
||||
|
||||
Returns:
|
||||
list of shader nodes.
|
||||
|
||||
"""
|
||||
# Get representations of shader file and relationships
|
||||
look_representation = io.find_one({"type": "representation",
|
||||
"parent": version_id,
|
||||
"name": "ma"})
|
||||
|
||||
# See if representation is already loaded, if so reuse it.
|
||||
host = api.registered_host()
|
||||
representation_id = str(look_representation['_id'])
|
||||
for container in host.ls():
|
||||
if (container['loader'] == "LookLoader" and
|
||||
container['representation'] == representation_id):
|
||||
log.info("Reusing loaded look ...")
|
||||
container_node = container['objectName']
|
||||
break
|
||||
else:
|
||||
log.info("Using look for the first time ...")
|
||||
|
||||
# Load file
|
||||
loaders = api.loaders_from_representation(api.discover(api.Loader),
|
||||
representation_id)
|
||||
loader = next(
|
||||
(i for i in loaders if i.__name__ == "LookLoader"), None)
|
||||
if loader is None:
|
||||
raise RuntimeError("Could not find LookLoader, this is a bug")
|
||||
|
||||
# Reference the look file
|
||||
with avalon.maya.maintained_selection():
|
||||
container_node = api.load(loader, look_representation)
|
||||
|
||||
# Get container members
|
||||
shader_nodes = cmds.sets(container_node, query=True)
|
||||
return shader_nodes
|
||||
|
||||
|
||||
def get_latest_version(asset_id, subset):
|
||||
# type: (str, str) -> dict
|
||||
"""Get latest version of subset.
|
||||
|
||||
Args:
|
||||
asset_id (str): Asset ID
|
||||
subset (str): Subset name.
|
||||
|
||||
Returns:
|
||||
Latest version
|
||||
|
||||
Raises:
|
||||
RuntimeError: When subset or version doesn't exist.
|
||||
|
||||
"""
|
||||
subset = io.find_one({"name": subset,
|
||||
"parent": io.ObjectId(asset_id),
|
||||
"type": "subset"})
|
||||
if not subset:
|
||||
raise RuntimeError("Subset does not exist: %s" % subset)
|
||||
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
if not version:
|
||||
raise RuntimeError("Version does not exist.")
|
||||
|
||||
return version
|
||||
|
||||
|
||||
def vrayproxy_assign_look(vrayproxy, subset="lookDefault"):
|
||||
# type: (str, str) -> None
|
||||
"""Assign look to vray proxy.
|
||||
|
||||
Args:
|
||||
vrayproxy (str): Name of vrayproxy to apply look to.
|
||||
subset (str): Name of look subset.
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
"""
|
||||
path = cmds.getAttr(vrayproxy + ".fileName")
|
||||
|
||||
nodes_by_id = get_alembic_ids_cache(path)
|
||||
if not nodes_by_id:
|
||||
log.warning("Alembic file has no cbId attributes: %s" % path)
|
||||
return
|
||||
|
||||
# Group by asset id so we run over the look per asset
|
||||
node_ids_by_asset_id = defaultdict(set)
|
||||
for node_id in nodes_by_id:
|
||||
asset_id = node_id.split(":", 1)[0]
|
||||
node_ids_by_asset_id[asset_id].add(node_id)
|
||||
|
||||
for asset_id, node_ids in node_ids_by_asset_id.items():
|
||||
|
||||
# Get latest look version
|
||||
try:
|
||||
version = get_latest_version(asset_id, subset=subset)
|
||||
except RuntimeError as exc:
|
||||
print(exc)
|
||||
continue
|
||||
|
||||
relationships = get_look_relationships(version["_id"])
|
||||
shadernodes = load_look(version["_id"])
|
||||
|
||||
# Get only the node ids and paths related to this asset
|
||||
# And get the shader edits the look supplies
|
||||
asset_nodes_by_id = {
|
||||
node_id: nodes_by_id[node_id] for node_id in node_ids
|
||||
}
|
||||
edits = list(
|
||||
lib.iter_shader_edits(
|
||||
relationships, shadernodes, asset_nodes_by_id))
|
||||
|
||||
# Create assignments
|
||||
assignments = {}
|
||||
for edit in edits:
|
||||
if edit["action"] == "assign":
|
||||
nodes = edit["nodes"]
|
||||
shader = edit["shader"]
|
||||
if not cmds.ls(shader, type="shadingEngine"):
|
||||
print("Skipping non-shader: %s" % shader)
|
||||
continue
|
||||
|
||||
inputs = cmds.listConnections(
|
||||
shader + ".surfaceShader", source=True)
|
||||
if not inputs:
|
||||
print("Shading engine missing material: %s" % shader)
|
||||
|
||||
# Strip off component assignments
|
||||
for i, node in enumerate(nodes):
|
||||
if "." in node:
|
||||
log.warning(
|
||||
("Converting face assignment to full object "
|
||||
"assignment. This conversion can be lossy: "
|
||||
"{}").format(node))
|
||||
nodes[i] = node.split(".")[0]
|
||||
|
||||
material = inputs[0]
|
||||
assignments[material] = nodes
|
||||
|
||||
assign_vrayproxy_shaders(vrayproxy, assignments)
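
Tying it together, roughly as the look assigner tool drives it above (selection handling simplified, subset name is an example):

from maya import cmds

for vray_proxy in cmds.ls(selection=True, type="VRayProxy", long=True):
    vrayproxy_assign_look(vray_proxy, subset="lookMain")
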
|
||||
|
|
@ -122,7 +122,7 @@ class AssetOutliner(QtWidgets.QWidget):
|
|||
|
||||
# Collect the asset item entries per asset
|
||||
# and collect the namespaces we'd like to apply
|
||||
assets = dict()
|
||||
assets = {}
|
||||
asset_namespaces = defaultdict(set)
|
||||
for item in items:
|
||||
asset_id = str(item["asset"]["_id"])
|
||||
|
|
|
|||
|
|
@ -1,6 +1,10 @@
|
|||
import sys
|
||||
from Qt import QtWidgets, QtGui
|
||||
from .lib import is_password_required
|
||||
from .lib import (
|
||||
is_password_required,
|
||||
BTN_FIXED_SIZE,
|
||||
CHILD_OFFSET
|
||||
)
|
||||
from .widgets import PasswordDialog
|
||||
from .local_settings import LocalSettingsWindow
|
||||
from .settings import (
|
||||
|
|
@ -32,7 +36,11 @@ def main(user_role=None):
|
|||
|
||||
__all__ = (
|
||||
"is_password_required",
|
||||
"BTN_FIXED_SIZE",
|
||||
"CHILD_OFFSET",
|
||||
|
||||
"style",
|
||||
|
||||
"PasswordDialog",
|
||||
"MainWidget",
|
||||
"ProjectListWidget",
|
||||
|
|
|
|||
|
|
@ -1,3 +1,7 @@
|
|||
CHILD_OFFSET = 15
|
||||
BTN_FIXED_SIZE = 20
|
||||
|
||||
|
||||
def is_password_required():
|
||||
from openpype.settings import (
|
||||
get_system_settings,
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ from .widgets import (
|
|||
Separator,
|
||||
ExpandingWidget
|
||||
)
|
||||
from .constants import CHILD_OFFSET
|
||||
from openpype.tools.settings import CHILD_OFFSET
|
||||
|
||||
|
||||
class AppVariantWidget(QtWidgets.QWidget):
|
||||
|
|
|
|||
|
|
@ -14,8 +14,6 @@ LOCAL_APPS_KEY = "applications"
|
|||
# Roots key constant
|
||||
LOCAL_ROOTS_KEY = "roots"
|
||||
|
||||
# Child offset in expandable widget
|
||||
CHILD_OFFSET = 15
|
||||
|
||||
__all__ = (
|
||||
"LABEL_REMOVE_DEFAULT",
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from Qt import QtWidgets, QtCore
|
||||
from openpype.tools.settings.settings.widgets.widgets import (
|
||||
from openpype.tools.settings.settings.widgets import (
|
||||
ExpandingWidget,
|
||||
SpacerWidget
|
||||
)
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ from openpype.settings.lib import (
|
|||
get_local_settings,
|
||||
save_local_settings
|
||||
)
|
||||
from openpype.tools.settings import CHILD_OFFSET
|
||||
from openpype.api import (
|
||||
SystemSettings,
|
||||
ProjectSettings
|
||||
|
|
@ -23,7 +24,6 @@ from .apps_widget import LocalApplicationsWidgets
|
|||
from .projects_widget import ProjectSettingsWidget
|
||||
|
||||
from .constants import (
|
||||
CHILD_OFFSET,
|
||||
LOCAL_GENERAL_KEY,
|
||||
LOCAL_PROJECTS_KEY,
|
||||
LOCAL_APPS_KEY
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
from . import style
|
||||
from .widgets import (
|
||||
MainWidget,
|
||||
ProjectListWidget
|
||||
)
|
||||
from .window import MainWidget
|
||||
from .widgets import ProjectListWidget
|
||||
|
||||
|
||||
__all__ = (
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from Qt import QtWidgets, QtGui, QtCore
|
||||
from .lib import CHILD_OFFSET
|
||||
from openpype.tools.settings import CHILD_OFFSET
|
||||
from .widgets import ExpandingWidget
|
||||
|
||||
|
||||
|
|
@ -30,8 +30,6 @@ from openpype.settings.entities import (
|
|||
from openpype.settings import SaveWarningExc
|
||||
from .widgets import ProjectListWidget
|
||||
|
||||
from . import lib
|
||||
|
||||
from .base import GUIWidget
|
||||
from .list_item_widget import ListWidget
|
||||
from .list_strict_widget import ListStrictWidget
|
||||
|
|
@ -8,7 +8,7 @@ from .widgets import (
|
|||
IconButton,
|
||||
SpacerWidget
|
||||
)
|
||||
from .lib import (
|
||||
from openpype.tools.settings import (
|
||||
BTN_FIXED_SIZE,
|
||||
CHILD_OFFSET
|
||||
)
|
||||
|
|
@ -32,14 +32,15 @@ def create_remove_btn(parent):
|
|||
|
||||
|
||||
class ModifiableDictEmptyItem(QtWidgets.QWidget):
|
||||
def __init__(self, entity_widget, parent):
|
||||
def __init__(self, entity_widget, store_as_list, parent):
|
||||
super(ModifiableDictEmptyItem, self).__init__(parent)
|
||||
self.entity_widget = entity_widget
|
||||
self.collapsible_key = entity_widget.entity.collapsible_key
|
||||
self.ignore_input_changes = entity_widget.ignore_input_changes
|
||||
|
||||
self.store_as_list = store_as_list
|
||||
self.is_duplicated = False
|
||||
self.key_is_valid = False
|
||||
self.key_is_valid = store_as_list
|
||||
|
||||
if self.collapsible_key:
|
||||
self.create_collapsible_ui()
|
||||
|
|
@ -101,7 +102,8 @@ class ModifiableDictEmptyItem(QtWidgets.QWidget):
|
|||
|
||||
def _on_key_change(self):
|
||||
key = self.key_input.text()
|
||||
self.key_is_valid = KEY_REGEX.match(key)
|
||||
if not self.store_as_list:
|
||||
self.key_is_valid = KEY_REGEX.match(key)
|
||||
|
||||
if self.ignore_input_changes:
|
||||
return
|
||||
|
|
@ -161,9 +163,11 @@ class ModifiableDictEmptyItem(QtWidgets.QWidget):
|
|||
|
||||
|
||||
class ModifiableDictItem(QtWidgets.QWidget):
|
||||
def __init__(self, collapsible_key, entity, entity_widget):
|
||||
def __init__(self, collapsible_key, store_as_list, entity, entity_widget):
|
||||
super(ModifiableDictItem, self).__init__(entity_widget.content_widget)
|
||||
|
||||
self.store_as_list = store_as_list
|
||||
|
||||
self.collapsible_key = collapsible_key
|
||||
self.entity = entity
|
||||
self.entity_widget = entity_widget
|
||||
|
|
@ -171,7 +175,7 @@ class ModifiableDictItem(QtWidgets.QWidget):
|
|||
self.ignore_input_changes = entity_widget.ignore_input_changes
|
||||
|
||||
self.is_key_duplicated = False
|
||||
self.key_is_valid = False
|
||||
self.key_is_valid = store_as_list
|
||||
self.is_required = False
|
||||
|
||||
self.origin_key = None
|
||||
|
|
@ -401,7 +405,8 @@ class ModifiableDictItem(QtWidgets.QWidget):
|
|||
|
||||
def _on_key_change(self):
|
||||
key = self.key_value()
|
||||
self.key_is_valid = KEY_REGEX.match(key)
|
||||
if not self.store_as_list:
|
||||
self.key_is_valid = KEY_REGEX.match(key)
|
||||
|
||||
if self.ignore_input_changes:
|
||||
return
|
||||
|
|
@ -607,7 +612,7 @@ class DictMutableKeysWidget(BaseWidget):
|
|||
self.add_required_keys()
|
||||
|
||||
self.empty_row = ModifiableDictEmptyItem(
|
||||
self, self.content_widget
|
||||
self, self.entity.store_as_list, self.content_widget
|
||||
)
|
||||
self.content_layout.addWidget(self.empty_row)
|
||||
|
||||
|
|
@ -734,7 +739,8 @@ class DictMutableKeysWidget(BaseWidget):
|
|||
|
||||
def add_widget_for_child(self, child_entity):
|
||||
input_field = ModifiableDictItem(
|
||||
self.entity.collapsible_key, child_entity, self
|
||||
self.entity.collapsible_key, self.entity.store_as_list,
|
||||
child_entity, self
|
||||
)
|
||||
self.input_fields.append(input_field)
|
||||
|
||||
|
|
@ -19,7 +19,7 @@ from .base import (
|
|||
BaseWidget,
|
||||
InputWidget
|
||||
)
|
||||
from .lib import CHILD_OFFSET
|
||||
from openpype.tools.settings import CHILD_OFFSET
|
||||
|
||||
|
||||
class DictImmutableKeysWidget(BaseWidget):
|
||||
|
|
@ -2,7 +2,7 @@ from Qt import QtWidgets, QtCore
|
|||
|
||||
from .base import InputWidget
|
||||
from .widgets import ExpandingWidget
|
||||
from .lib import (
|
||||
from openpype.tools.settings import (
|
||||
BTN_FIXED_SIZE,
|
||||
CHILD_OFFSET
|
||||
)
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
from .window import MainWidget
|
||||
from .widgets import ProjectListWidget
|
||||
|
||||
|
||||
__all__ = [
|
||||
"MainWidget",
|
||||
"ProjectListWidget"
|
||||
]
|
||||
|
|
@ -1,601 +0,0 @@
|
|||
import os
|
||||
import re
|
||||
import json
|
||||
import copy
|
||||
from openpype.settings.constants import (
|
||||
M_OVERRIDEN_KEY,
|
||||
M_ENVIRONMENT_KEY,
|
||||
M_DYNAMIC_KEY_LABEL
|
||||
)
|
||||
from queue import Queue
|
||||
|
||||
|
||||
# Singleton database of available inputs
|
||||
class TypeToKlass:
|
||||
types = {}
|
||||
|
||||
|
||||
NOT_SET = type("NOT_SET", (), {"__bool__": lambda obj: False})()
|
||||
METADATA_KEY = type("METADATA_KEY", (), {})()
|
||||
OVERRIDE_VERSION = 1
|
||||
CHILD_OFFSET = 15
|
||||
BTN_FIXED_SIZE = 20
|
||||
|
||||
key_pattern = re.compile(r"(\{.*?[^{0]*\})")
|
||||
|
||||
|
||||
def convert_gui_data_with_metadata(data, ignored_keys=None):
|
||||
if not data or not isinstance(data, dict):
|
||||
return data
|
||||
|
||||
if ignored_keys is None:
|
||||
ignored_keys = tuple()
|
||||
|
||||
output = {}
|
||||
if METADATA_KEY in data:
|
||||
metadata = data.pop(METADATA_KEY)
|
||||
for key, value in metadata.items():
|
||||
if key in ignored_keys or key == "groups":
|
||||
continue
|
||||
|
||||
if key == "environments":
|
||||
output[M_ENVIRONMENT_KEY] = value
|
||||
elif key == "dynamic_key_label":
|
||||
output[M_DYNAMIC_KEY_LABEL] = value
|
||||
else:
|
||||
raise KeyError("Unknown metadata key \"{}\"".format(key))
|
||||
|
||||
for key, value in data.items():
|
||||
output[key] = convert_gui_data_with_metadata(value, ignored_keys)
|
||||
return output
|
||||
|
||||
|
||||
def convert_data_to_gui_data(data, first=True):
|
||||
if not data or not isinstance(data, dict):
|
||||
return data
|
||||
|
||||
output = {}
|
||||
if M_ENVIRONMENT_KEY in data:
|
||||
data.pop(M_ENVIRONMENT_KEY)
|
||||
|
||||
if M_DYNAMIC_KEY_LABEL in data:
|
||||
if METADATA_KEY not in data:
|
||||
data[METADATA_KEY] = {}
|
||||
data[METADATA_KEY]["dynamic_key_label"] = data.pop(M_DYNAMIC_KEY_LABEL)
|
||||
|
||||
for key, value in data.items():
|
||||
output[key] = convert_data_to_gui_data(value, False)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def convert_gui_data_to_overrides(data, first=True):
|
||||
if not data or not isinstance(data, dict):
|
||||
return data
|
||||
|
||||
output = {}
|
||||
if first:
|
||||
output["__override_version__"] = OVERRIDE_VERSION
|
||||
data = convert_gui_data_with_metadata(data)
|
||||
|
||||
if METADATA_KEY in data:
|
||||
metadata = data.pop(METADATA_KEY)
|
||||
for key, value in metadata.items():
|
||||
if key == "groups":
|
||||
output[M_OVERRIDEN_KEY] = value
|
||||
else:
|
||||
raise KeyError("Unknown metadata key \"{}\"".format(key))
|
||||
|
||||
for key, value in data.items():
|
||||
output[key] = convert_gui_data_to_overrides(value, False)
|
||||
return output
|
||||
|
||||
|
||||
def convert_overrides_to_gui_data(data, first=True):
|
||||
if not data or not isinstance(data, dict):
|
||||
return data
|
||||
|
||||
if first:
|
||||
data = convert_data_to_gui_data(data)
|
||||
|
||||
output = {}
|
||||
if M_OVERRIDEN_KEY in data:
|
||||
groups = data.pop(M_OVERRIDEN_KEY)
|
||||
if METADATA_KEY not in output:
|
||||
output[METADATA_KEY] = {}
|
||||
output[METADATA_KEY]["groups"] = groups
|
||||
|
||||
for key, value in data.items():
|
||||
output[key] = convert_overrides_to_gui_data(value, False)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def _fill_schema_template_data(
|
||||
template, template_data, required_keys=None, missing_keys=None
|
||||
):
|
||||
first = False
|
||||
if required_keys is None:
|
||||
first = True
|
||||
required_keys = set()
|
||||
missing_keys = set()
|
||||
|
||||
_template = []
|
||||
default_values = {}
|
||||
for item in template:
|
||||
if isinstance(item, dict) and "__default_values__" in item:
|
||||
default_values = item["__default_values__"]
|
||||
else:
|
||||
_template.append(item)
|
||||
template = _template
|
||||
|
||||
for key, value in default_values.items():
|
||||
if key not in template_data:
|
||||
template_data[key] = value
|
||||
|
||||
if not template:
|
||||
output = template
|
||||
|
||||
elif isinstance(template, list):
|
||||
output = []
|
||||
for item in template:
|
||||
output.append(_fill_schema_template_data(
|
||||
item, template_data, required_keys, missing_keys
|
||||
))
|
||||
|
||||
elif isinstance(template, dict):
|
||||
output = {}
|
||||
for key, value in template.items():
|
||||
output[key] = _fill_schema_template_data(
|
||||
value, template_data, required_keys, missing_keys
|
||||
)
|
||||
|
||||
elif isinstance(template, str):
|
||||
# TODO find much better way how to handle filling template data
|
||||
for replacement_string in key_pattern.findall(template):
|
||||
key = str(replacement_string[1:-1])
|
||||
required_keys.add(key)
|
||||
if key not in template_data:
|
||||
missing_keys.add(key)
|
||||
continue
|
||||
|
||||
value = template_data[key]
|
||||
if replacement_string == template:
|
||||
# Replace the value with value from templates data
|
||||
# - with this is possible to set value with different type
|
||||
template = value
|
||||
else:
|
||||
# Only replace the key in string
|
||||
template = template.replace(replacement_string, value)
|
||||
output = template
|
||||
|
||||
else:
|
||||
output = template
|
||||
|
||||
if first and missing_keys:
|
||||
raise SchemaTemplateMissingKeys(missing_keys, required_keys)
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def _fill_schema_template(child_data, schema_collection, schema_templates):
|
||||
template_name = child_data["name"]
|
||||
template = schema_templates.get(template_name)
|
||||
if template is None:
|
||||
if template_name in schema_collection:
|
||||
raise KeyError((
|
||||
"Schema \"{}\" is used as `schema_template`"
|
||||
).format(template_name))
|
||||
raise KeyError("Schema template \"{}\" was not found".format(
|
||||
template_name
|
||||
))
|
||||
|
||||
# Default value must be dictionary (NOT list)
|
||||
# - empty list would not add any item if `template_data` are not filled
|
||||
template_data = child_data.get("template_data") or {}
|
||||
if isinstance(template_data, dict):
|
||||
template_data = [template_data]
|
||||
|
||||
output = []
|
||||
for single_template_data in template_data:
|
||||
try:
|
||||
filled_child = _fill_schema_template_data(
|
||||
template, single_template_data
|
||||
)
|
||||
|
||||
except SchemaTemplateMissingKeys as exc:
|
||||
raise SchemaTemplateMissingKeys(
|
||||
exc.missing_keys, exc.required_keys, template_name
|
||||
)
|
||||
|
||||
for item in filled_child:
|
||||
filled_item = _fill_inner_schemas(
|
||||
item, schema_collection, schema_templates
|
||||
)
|
||||
if filled_item["type"] == "schema_template":
|
||||
output.extend(_fill_schema_template(
|
||||
filled_item, schema_collection, schema_templates
|
||||
))
|
||||
else:
|
||||
output.append(filled_item)
|
||||
return output
|
||||
|
||||
|
||||
def _fill_inner_schemas(schema_data, schema_collection, schema_templates):
|
||||
if schema_data["type"] == "schema":
|
||||
raise ValueError("First item in schema data can't be schema.")
|
||||
|
||||
children_key = "children"
|
||||
object_type_key = "object_type"
|
||||
for item_key in (children_key, object_type_key):
|
||||
children = schema_data.get(item_key)
|
||||
if not children:
|
||||
continue
|
||||
|
||||
if object_type_key == item_key:
|
||||
if not isinstance(children, dict):
|
||||
continue
|
||||
children = [children]
|
||||
|
||||
new_children = []
|
||||
for child in children:
|
||||
child_type = child["type"]
|
||||
if child_type == "schema":
|
||||
schema_name = child["name"]
|
||||
if schema_name not in schema_collection:
|
||||
if schema_name in schema_templates:
|
||||
raise KeyError((
|
||||
"Schema template \"{}\" is used as `schema`"
|
||||
).format(schema_name))
|
||||
raise KeyError(
|
||||
"Schema \"{}\" was not found".format(schema_name)
|
||||
)
|
||||
|
||||
filled_child = _fill_inner_schemas(
|
||||
schema_collection[schema_name],
|
||||
schema_collection,
|
||||
schema_templates
|
||||
)
|
||||
|
||||
elif child_type == "schema_template":
|
||||
for filled_child in _fill_schema_template(
|
||||
child, schema_collection, schema_templates
|
||||
):
|
||||
new_children.append(filled_child)
|
||||
continue
|
||||
|
||||
else:
|
||||
filled_child = _fill_inner_schemas(
|
||||
child, schema_collection, schema_templates
|
||||
)
|
||||
|
||||
new_children.append(filled_child)
|
||||
|
||||
if item_key == object_type_key:
|
||||
if len(new_children) != 1:
|
||||
raise KeyError((
|
||||
"Failed to fill object type with type: {} | name {}"
|
||||
).format(
|
||||
child_type, str(child.get("name"))
|
||||
))
|
||||
new_children = new_children[0]
|
||||
|
||||
schema_data[item_key] = new_children
|
||||
return schema_data
|
||||
|
||||
|
||||
class SchemaTemplateMissingKeys(Exception):
|
||||
def __init__(self, missing_keys, required_keys, template_name=None):
|
||||
self.missing_keys = missing_keys
|
||||
self.required_keys = required_keys
|
||||
if template_name:
|
||||
msg = f"Schema template \"{template_name}\" require more keys.\n"
|
||||
else:
|
||||
msg = ""
|
||||
msg += "Required keys: {}\nMissing keys: {}".format(
|
||||
self.join_keys(required_keys),
|
||||
self.join_keys(missing_keys)
|
||||
)
|
||||
super(SchemaTemplateMissingKeys, self).__init__(msg)
|
||||
|
||||
def join_keys(self, keys):
|
||||
return ", ".join([
|
||||
f"\"{key}\"" for key in keys
|
||||
])
|
||||
|
||||
|
||||
class SchemaMissingFileInfo(Exception):
|
||||
def __init__(self, invalid):
|
||||
full_path_keys = []
|
||||
for item in invalid:
|
||||
full_path_keys.append("\"{}\"".format("/".join(item)))
|
||||
|
||||
msg = (
|
||||
"Schema has missing definition of output file (\"is_file\" key)"
|
||||
" for keys. [{}]"
|
||||
).format(", ".join(full_path_keys))
|
||||
super(SchemaMissingFileInfo, self).__init__(msg)
|
||||
|
||||
|
||||
class SchemeGroupHierarchyBug(Exception):
|
||||
def __init__(self, invalid):
|
||||
full_path_keys = []
|
||||
for item in invalid:
|
||||
full_path_keys.append("\"{}\"".format("/".join(item)))
|
||||
|
||||
msg = (
|
||||
"Items with attribute \"is_group\" can't have another item with"
|
||||
" \"is_group\" attribute as child. Error happened for keys: [{}]"
|
||||
).format(", ".join(full_path_keys))
|
||||
super(SchemeGroupHierarchyBug, self).__init__(msg)
|
||||
|
||||
|
||||
class SchemaDuplicatedKeys(Exception):
|
||||
def __init__(self, invalid):
|
||||
items = []
|
||||
for key_path, keys in invalid.items():
|
||||
joined_keys = ", ".join([
|
||||
"\"{}\"".format(key) for key in keys
|
||||
])
|
||||
items.append("\"{}\" ({})".format(key_path, joined_keys))
|
||||
|
||||
msg = (
|
||||
"Schema items contain duplicated keys in one hierarchy level. {}"
|
||||
).format(" || ".join(items))
|
||||
super(SchemaDuplicatedKeys, self).__init__(msg)
|
||||
|
||||
|
||||
class SchemaDuplicatedEnvGroupKeys(Exception):
|
||||
def __init__(self, invalid):
|
||||
items = []
|
||||
for key_path, keys in invalid.items():
|
||||
joined_keys = ", ".join([
|
||||
"\"{}\"".format(key) for key in keys
|
||||
])
|
||||
items.append("\"{}\" ({})".format(key_path, joined_keys))
|
||||
|
||||
msg = (
|
||||
"Schema items contain duplicated environment group keys. {}"
|
||||
).format(" || ".join(items))
|
||||
super(SchemaDuplicatedEnvGroupKeys, self).__init__(msg)
|
||||
|
||||
|
||||
def file_keys_from_schema(schema_data):
|
||||
output = []
|
||||
item_type = schema_data["type"]
|
||||
klass = TypeToKlass.types[item_type]
|
||||
if not klass.is_input_type:
|
||||
return output
|
||||
|
||||
keys = []
|
||||
key = schema_data.get("key")
|
||||
if key:
|
||||
keys.append(key)
|
||||
|
||||
for child in schema_data["children"]:
|
||||
if child.get("is_file"):
|
||||
_keys = copy.deepcopy(keys)
|
||||
_keys.append(child["key"])
|
||||
output.append(_keys)
|
||||
continue
|
||||
|
||||
for result in file_keys_from_schema(child):
|
||||
_keys = copy.deepcopy(keys)
|
||||
_keys.extend(result)
|
||||
output.append(_keys)
|
||||
return output
|
||||
|
||||
|
||||
def validate_all_has_ending_file(schema_data, is_top=True):
|
||||
item_type = schema_data["type"]
|
||||
klass = TypeToKlass.types[item_type]
|
||||
if not klass.is_input_type:
|
||||
return None
|
||||
|
||||
if schema_data.get("is_file"):
|
||||
return None
|
||||
|
||||
children = schema_data.get("children")
|
||||
if not children:
|
||||
return [[schema_data["key"]]]
|
||||
|
||||
invalid = []
|
||||
keyless = "key" not in schema_data
|
||||
for child in children:
|
||||
result = validate_all_has_ending_file(child, False)
|
||||
if result is None:
|
||||
continue
|
||||
|
||||
if keyless:
|
||||
invalid.extend(result)
|
||||
else:
|
||||
for item in result:
|
||||
new_invalid = [schema_data["key"]]
|
||||
new_invalid.extend(item)
|
||||
invalid.append(new_invalid)
|
||||
|
||||
if not invalid:
|
||||
return None
|
||||
|
||||
if not is_top:
|
||||
return invalid
|
||||
|
||||
raise SchemaMissingFileInfo(invalid)
|
||||
|
||||
|
||||
def validate_is_group_is_unique_in_hierarchy(
|
||||
schema_data, any_parent_is_group=False, keys=None
|
||||
):
|
||||
is_top = keys is None
|
||||
if keys is None:
|
||||
keys = []
|
||||
|
||||
keyless = "key" not in schema_data
|
||||
|
||||
if not keyless:
|
||||
keys.append(schema_data["key"])
|
||||
|
||||
invalid = []
|
||||
is_group = schema_data.get("is_group")
|
||||
if is_group and any_parent_is_group:
|
||||
invalid.append(copy.deepcopy(keys))
|
||||
|
||||
if is_group:
|
||||
any_parent_is_group = is_group
|
||||
|
||||
children = schema_data.get("children")
|
||||
if not children:
|
||||
return invalid
|
||||
|
||||
for child in children:
|
||||
result = validate_is_group_is_unique_in_hierarchy(
|
||||
child, any_parent_is_group, copy.deepcopy(keys)
|
||||
)
|
||||
if not result:
|
||||
continue
|
||||
|
||||
invalid.extend(result)
|
||||
|
||||
if invalid and is_group and keys not in invalid:
|
||||
invalid.append(copy.deepcopy(keys))
|
||||
|
||||
if not is_top:
|
||||
return invalid
|
||||
|
||||
if invalid:
|
||||
raise SchemeGroupHierarchyBug(invalid)
|
||||
|
||||
|
||||
def validate_keys_are_unique(schema_data, keys=None):
|
||||
children = schema_data.get("children")
|
||||
if not children:
|
||||
return
|
||||
|
||||
is_top = keys is None
|
||||
if keys is None:
|
||||
keys = [schema_data["key"]]
|
||||
else:
|
||||
keys.append(schema_data["key"])
|
||||
|
||||
child_queue = Queue()
|
||||
for child in children:
|
||||
child_queue.put(child)
|
||||
|
||||
child_inputs = []
|
||||
while not child_queue.empty():
|
||||
child = child_queue.get()
|
||||
if "key" not in child:
|
||||
_children = child.get("children") or []
|
||||
for _child in _children:
|
||||
child_queue.put(_child)
|
||||
else:
|
||||
child_inputs.append(child)
|
||||
|
||||
duplicated_keys = set()
|
||||
child_keys = set()
|
||||
for child in child_inputs:
|
||||
key = child["key"]
|
||||
if key in child_keys:
|
||||
duplicated_keys.add(key)
|
||||
else:
|
||||
child_keys.add(key)
|
||||
|
||||
invalid = {}
|
||||
if duplicated_keys:
|
||||
joined_keys = "/".join(keys)
|
||||
invalid[joined_keys] = duplicated_keys
|
||||
|
||||
for child in child_inputs:
|
||||
result = validate_keys_are_unique(child, copy.deepcopy(keys))
|
||||
if result:
|
||||
invalid.update(result)
|
||||
|
||||
if not is_top:
|
||||
return invalid
|
||||
|
||||
if invalid:
|
||||
raise SchemaDuplicatedKeys(invalid)
|
||||
|
||||
|
||||
def validate_environment_groups_uniquenes(
|
||||
schema_data, env_groups=None, keys=None
|
||||
):
|
||||
is_first = False
|
||||
if env_groups is None:
|
||||
is_first = True
|
||||
env_groups = {}
|
||||
keys = []
|
||||
|
||||
my_keys = copy.deepcopy(keys)
|
||||
key = schema_data.get("key")
|
||||
if key:
|
||||
my_keys.append(key)
|
||||
|
||||
env_group_key = schema_data.get("env_group_key")
|
||||
if env_group_key:
|
||||
if env_group_key not in env_groups:
|
||||
env_groups[env_group_key] = []
|
||||
env_groups[env_group_key].append("/".join(my_keys))
|
||||
|
||||
children = schema_data.get("children")
|
||||
if not children:
|
||||
return
|
||||
|
||||
for child in children:
|
||||
validate_environment_groups_uniquenes(
|
||||
child, env_groups, copy.deepcopy(my_keys)
|
||||
)
|
||||
|
||||
if is_first:
|
||||
invalid = {}
|
||||
for env_group_key, key_paths in env_groups.items():
|
||||
if len(key_paths) > 1:
|
||||
invalid[env_group_key] = key_paths
|
||||
|
||||
if invalid:
|
||||
raise SchemaDuplicatedEnvGroupKeys(invalid)
|
||||
|
||||
|
||||
def validate_schema(schema_data):
|
||||
validate_all_has_ending_file(schema_data)
|
||||
validate_is_group_is_unique_in_hierarchy(schema_data)
|
||||
validate_keys_are_unique(schema_data)
|
||||
validate_environment_groups_uniquenes(schema_data)
|
||||
|
||||
|
||||
def gui_schema(subfolder, main_schema_name):
|
||||
subfolder, main_schema_name
|
||||
dirpath = os.path.join(
|
||||
os.path.dirname(os.path.dirname(__file__)),
|
||||
"gui_schemas",
|
||||
subfolder
|
||||
)
|
||||
|
||||
loaded_schemas = {}
|
||||
loaded_schema_templates = {}
|
||||
for root, _, filenames in os.walk(dirpath):
|
||||
for filename in filenames:
|
||||
basename, ext = os.path.splitext(filename)
|
||||
if ext != ".json":
|
||||
continue
|
||||
|
||||
filepath = os.path.join(root, filename)
|
||||
with open(filepath, "r") as json_stream:
|
||||
try:
|
||||
schema_data = json.load(json_stream)
|
||||
except Exception as exc:
|
||||
raise Exception((
|
||||
f"Unable to parse JSON file {filepath}\n{exc}"
|
||||
)) from exc
|
||||
if isinstance(schema_data, list):
|
||||
loaded_schema_templates[basename] = schema_data
|
||||
else:
|
||||
loaded_schemas[basename] = schema_data
|
||||
|
||||
main_schema = _fill_inner_schemas(
|
||||
loaded_schemas[main_schema_name],
|
||||
loaded_schemas,
|
||||
loaded_schema_templates
|
||||
)
|
||||
validate_schema(main_schema)
|
||||
return main_schema
|
||||
|
|
@ -5,7 +5,7 @@ from .categories import (
|
|||
ProjectWidget
|
||||
)
|
||||
from .widgets import ShadowWidget
|
||||
from .. import style
|
||||
from . import style
|
||||
|
||||
from openpype.tools.settings import (
|
||||
is_password_required,
|
||||
|
|
@ -5,7 +5,7 @@ from .widgets import (
|
|||
ExpandingWidget,
|
||||
GridLabelWidget
|
||||
)
|
||||
from .lib import CHILD_OFFSET
|
||||
from openpype.tools.settings import CHILD_OFFSET
|
||||
|
||||
|
||||
class WrapperWidget(QtWidgets.QWidget):
|
||||
|
|
@ -255,9 +255,9 @@ class FamilyWidget(QtWidgets.QWidget):
|
|||
defaults = list(plugin.defaults)
|
||||
|
||||
# Replace
|
||||
compare_regex = re.compile(
|
||||
subset_name.replace(user_input_text, "(.+)")
|
||||
)
|
||||
compare_regex = re.compile(re.sub(
|
||||
user_input_text, "(.+)", subset_name, flags=re.IGNORECASE
|
||||
))
|
||||
subset_hints = set()
|
||||
if user_input_text:
|
||||
for _name in existing_subset_names:
|
||||
|
|
|
|||
|
|
@ -97,9 +97,9 @@ url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.0-windows.zip"
hash = "fd2e00278e01e85dcee7b4a6969d1a16f13016ec16700fb0366dbb1b1f3c37ad"

[openpype.thirdparty.oiio.linux]
url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-linux.tgz"
hash = "sha256:..."
url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.12-linux.tgz"
hash = "de63a8bf7f6c45ff59ecafeba13123f710c2cbc1783ec9e0b938e980d4f5c37f"

[openpype.thirdparty.oiio.darwin]
url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz"
hash = "sha256:..."
|
||||
|
|
|
|||
|
|
@@ -74,11 +74,19 @@ function Install-Poetry() {

$art = @"

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@

@@ -6,11 +6,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -65,7 +73,7 @@ detect_python () {
local version_command
version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
local python_version
python_version="$(python3 <<< ${version_command})"
python_version="$(python <<< ${version_command})"
oIFS="$IFS"
IFS=.
set -- $python_version

@@ -77,7 +85,7 @@ detect_python () {
echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
fi
else
command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
command -v python >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
fi
}

@@ -123,7 +131,7 @@ realpath () {
install_poetry () {
echo -e "${BIGreen}>>>${RST} Installing Poetry ..."
command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; }
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 -
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
}

# Main

@@ -138,7 +146,7 @@ main () {
pushd "$openpype_root" > /dev/null || return > /dev/null

version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);"
openpype_version="$(python3 <<< ${version_command})"
openpype_version="$(python <<< ${version_command})"

_inside_openpype_tool="1"

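The `version_command` one-liner executes `openpype/version.py` and prints its `__version__`. The same idea as a standalone Python sketch; the repository path is an assumption:

```python
import os

openpype_root = "/path/to/openpype"  # assumed repository checkout

# Execute version.py in an isolated namespace and read __version__
# out of it, mirroring the shell one-liner above.
namespace = {}
version_file = os.path.join(openpype_root, "openpype", "version.py")
with open(version_file) as stream:
    exec(stream.read(), namespace)
print(namespace["__version__"])
```
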
@@ -169,11 +177,11 @@ main () {

echo -e "${BIGreen}>>>${RST} Building ..."
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
poetry run python3 "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
poetry run python "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
elif [[ "$OSTYPE" == "darwin"* ]]; then
poetry run python3 "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
poetry run python "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
fi
poetry run python3 "$openpype_root/tools/build_dependencies.py"
poetry run python "$openpype_root/tools/build_dependencies.py"

if [[ "$OSTYPE" == "darwin"* ]]; then
# fix code signing issue

@@ -95,11 +95,21 @@ Set-Location -Path $openpype_root

$art = @"

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io

. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@
if (-not (Test-Path 'env:_INSIDE_OPENPYPE_TOOL')) {

@@ -7,11 +7,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -81,7 +89,7 @@ done
detect_python () {
echo -e "${BIGreen}>>>${RST} Using python \c"
local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
local python_version="$(python3 <<< ${version_command})"
local python_version="$(python <<< ${version_command})"
oIFS="$IFS"
IFS=.
set -- $python_version

@@ -93,15 +101,16 @@ detect_python () {
echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
fi
else
command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
command -v python >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
fi
}

install_poetry () {
echo -e "${BIGreen}>>>${RST} Installing Poetry ..."
export POETRY_HOME="$openpype_root/.poetry"
command -v curl >/dev/null 2>&1 || { echo -e "${BIRed}!!!${RST}${BIYellow} Missing ${RST}${BIBlue}curl${BIYellow} command.${RST}"; return 1; }
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 -
export PATH="$PATH:$HOME/.poetry/bin"
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
export PATH="$PATH:$POETRY_HOME/bin"
}

##############################################################################

@@ -177,7 +186,7 @@ main () {
# cx_freeze will crash on missing __pycache__ on these but
# reinstalling them solves the problem.
echo -e "${BIGreen}>>>${RST} Fixing pycache bug ..."
poetry run python -m pip install --upgrade pip
poetry run python -m pip install --force-reinstall pip
poetry run pip install --force-reinstall setuptools
poetry run pip install --force-reinstall wheel
poetry run python -m pip install --force-reinstall pip

@@ -50,11 +50,19 @@ Set-Location -Path $openpype_root

$art = @"

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@

@@ -8,11 +8,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

65
tools/docker_build.sh
Executable file

@@ -0,0 +1,65 @@
#!/usr/bin/env bash

# Colors for terminal

RST='\033[0m'             # Text Reset
BIGreen='\033[1;92m'      # Green
BIYellow='\033[1;93m'     # Yellow
BIRed='\033[1;91m'        # Red

##############################################################################
# Return absolute path
# Globals:
#   None
# Arguments:
#   Path to resolve
# Returns:
#   None
###############################################################################
realpath () {
echo $(cd $(dirname "$1"); pwd)/$(basename "$1")
}

# Main
main () {
openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))
pushd "$openpype_root" > /dev/null || return > /dev/null

echo -e "${BIYellow}---${RST} Cleaning build directory ..."
rm -rf "$openpype_root/build" && mkdir "$openpype_root/build" > /dev/null

version_command="import os;exec(open(os.path.join('$openpype_root', 'openpype', 'version.py')).read());print(__version__);"
openpype_version="$(python3 <<< ${version_command})"

echo -e "${BIGreen}>>>${RST} Running docker build ..."
docker build --pull --no-cache -t pypeclub/openpype:$openpype_version .
if [ $? -ne 0 ] ; then
echo -e "${BIRed}!!!${RST} Docker build failed."
return 1
fi

echo -e "${BIGreen}>>>${RST} Copying build from container ..."
echo -e "${BIYellow}---${RST} Creating container from pypeclub/openpype:$openpype_version ..."
id="$(docker create -ti pypeclub/openpype:$openpype_version bash)"
if [ $? -ne 0 ] ; then
echo -e "${BIRed}!!!${RST} Cannot create just built container."
return 1
fi
echo -e "${BIYellow}---${RST} Copying ..."
docker cp "$id:/opt/openpype/build/exe.linux-x86_64-3.7" "$openpype_root/build"
if [ $? -ne 0 ] ; then
echo -e "${BIRed}!!!${RST} Copying failed."
return 1
fi

echo -e "${BIGreen}>>>${RST} Fixing user ownership ..."
username="$(logname)"
chown -R $username ./build

echo -e "${BIGreen}>>>${RST} All done, you can delete container:"
echo -e "${BIYellow}$id${RST}"
}

return_code=0
main || return_code=$?
exit $return_code

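The same build-then-copy sequence the script performs can also be driven from Python; a rough sketch using `subprocess`, where the version value and paths are assumptions:

```python
import subprocess

version = "3.0.0"  # assumed value read from openpype/version.py
tag = f"pypeclub/openpype:{version}"

# Build the image, create a throwaway container from it and copy the
# frozen build out - the same steps as in tools/docker_build.sh.
subprocess.run(
    ["docker", "build", "--pull", "--no-cache", "-t", tag, "."],
    check=True,
)
container_id = subprocess.run(
    ["docker", "create", "-ti", tag, "bash"],
    check=True, capture_output=True, text=True,
).stdout.strip()
subprocess.run(
    ["docker", "cp",
     f"{container_id}:/opt/openpype/build/exe.linux-x86_64-3.7", "./build"],
    check=True,
)
```
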
@@ -5,13 +5,20 @@

art () {
cat <<-EOF
____________
/\\ ___ \\
\\ \\ \\/_\\ \\
\\ \\ _____/ ______ ___ ___ ___
\\ \\ \\___/ /\\ \\ \\ \\\\ \\\\ \\
\\ \\____\\ \\ \\_____\\ \\__\\\\__\\\\__\\
\\/____/ \\/_____/ . PYPE Club .

. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -51,53 +58,6 @@ BICyan='\033[1;96m' # Cyan
BIWhite='\033[1;97m' # White


##############################################################################
# Detect required version of python
# Globals:
#   colors
#   PYTHON
# Arguments:
#   None
# Returns:
#   None
###############################################################################
detect_python () {
echo -e "${BIGreen}>>>${RST} Using python \c"
local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))"
local python_version="$(python3 <<< ${version_command})"
oIFS="$IFS"
IFS=.
set -- $python_version
IFS="$oIFS"
if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then
if [ "$2" -gt "7" ] ; then
echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1;
else
echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}"
fi
PYTHON="python3"
else
command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; }
fi
}

##############################################################################
# Clean pyc files in specified directory
# Globals:
#   None
# Arguments:
#   Optional path to clean
# Returns:
#   None
###############################################################################
clean_pyc () {
local path
path=$pype_root
echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
echo -e "${BIGreen}DONE${RST}"
}

##############################################################################
# Return absolute path
# Globals:

@@ -140,7 +100,7 @@ main () {
pushd "$openpype_root" > /dev/null || return > /dev/null

echo -e "${BIGreen}>>>${RST} Running Pype tool ..."
poetry run python3 "$openpype_root/tools/fetch_thirdparty_libs.py"
poetry run python "$openpype_root/tools/fetch_thirdparty_libs.py"
}

main

@@ -30,12 +30,19 @@ Set-Location -Path $openpype_root

$art = @"


▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@

@@ -7,11 +7,20 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -13,11 +13,19 @@ PS> .\run_mongo.ps1

$art = @"

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@

@@ -7,11 +7,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -6,11 +6,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -50,23 +58,6 @@ BICyan='\033[1;96m' # Cyan
BIWhite='\033[1;97m' # White


##############################################################################
# Clean pyc files in specified directory
# Globals:
#   None
# Arguments:
#   Optional path to clean
# Returns:
#   None
###############################################################################
clean_pyc () {
local path
path=$openpype_root
echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
echo -e "${BIGreen}DONE${RST}"
}

##############################################################################
# Return absolute path
# Globals:

@@ -82,9 +73,6 @@ realpath () {

# Main
main () {
echo -e "${BGreen}"
art
echo -e "${RST}"

# Directories
openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))

@@ -32,11 +32,19 @@ function Show-PSWarning() {

$art = @"

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@

@@ -6,11 +6,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

@@ -1,20 +1,6 @@
#!/usr/bin/env bash

# Run OpenPype Tray


art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io

EOF
}

# Colors for terminal

RST='\033[0m' # Text Reset

@@ -49,22 +35,6 @@ BIPurple='\033[1;95m' # Purple
BICyan='\033[1;96m' # Cyan
BIWhite='\033[1;97m' # White

##############################################################################
# Clean pyc files in specified directory
# Globals:
#   None
# Arguments:
#   Optional path to clean
# Returns:
#   None
###############################################################################
clean_pyc () {
local path
path=$openpype_root
echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c"
find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete
echo -e "${BIGreen}DONE${RST}"
}

##############################################################################
# Return absolute path

@@ -81,11 +51,6 @@ realpath () {

# Main
main () {
echo -e "${BGreen}"
art
echo -e "${RST}"
detect_python || return 1

# Directories
openpype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))

@@ -10,11 +10,19 @@ PS> .\update_submodules.ps1

$art = @"

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
https://openpype.io
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

"@

@@ -6,10 +6,19 @@
art () {
cat <<-EOF

▒█▀▀▀█ █▀▀█ █▀▀ █▀▀▄ ▒█▀▀█ █░░█ █▀▀█ █▀▀ ▀█▀ ▀█▀ ▀█▀
▒█░░▒█ █░░█ █▀▀ █░░█ ▒█▄▄█ █▄▄█ █░░█ █▀▀ ▒█░ ▒█░ ▒█░
▒█▄▄▄█ █▀▀▀ ▀▀▀ ▀░░▀ ▒█░░░ ▄▄▄█ █▀▀▀ ▀▀▀ ▄█▄ ▄█▄ ▄█▄
.---= [ by Pype Club ] =---.
. . .. . ..
_oOOP3OPP3Op_. .
.PPpo~· ·· ~2p. ·· ···· · ·
·Ppo · .pPO3Op.· · O:· · · ·
.3Pp · oP3'· 'P33· · 4 ·· · · · ·· · · ·
·~OP 3PO· .Op3 : · ·· _____ _____ _____
·P3O · oP3oP3O3P' · · · · / /·/ /·/ /
O3:· O3p~ · ·:· · ·/____/·/____/ /____/
'P · 3p3· oP3~· ·.P:· · · ·· · · ·· · · ·
· ': · Po' ·Opo'· .3O· . o[ by Pype Club ]]]==- - - · ·
· '_ .. · . _OP3·· · ·https://openpype.io·· ·
~P3·OPPPO3OP~ · ·· ·
· ' '· · ·· · · · ·· ·

EOF
}

103
website/docs/admin_hosts_resolve.md
Normal file

@@ -0,0 +1,103 @@
---
id: admin_hosts_resolve
title: DaVinci Resolve Setup
sidebar_label: DaVinci Resolve
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

## Resolve requirements

Due to the way Resolve handles Python and Python scripts, a few setup steps are required on any machine that will be using OpenPype with Resolve.

### Installing Resolve's own Python 3.6 interpreter

Resolve uses a hardcoded method to look for the Python executable path. All of the following paths are created automatically by the Python installer. We are using Python 3.6.2.

<Tabs
groupId="platforms"
defaultValue="win"
values={[
{label: 'Windows', value: 'win'},
{label: 'Linux', value: 'linux'},
{label: 'Mac', value: 'mac'},
]}>

<TabItem value="win">

`%LOCALAPPDATA%\Programs\Python\Python36`

</TabItem>
<TabItem value="linux">

`/opt/Python/3.6/bin`

</TabItem>
<TabItem value="mac">

`~/Library/Python/3.6/bin`

</TabItem>
</Tabs>

### Installing PySide2 into Python 3.6 for correct GUI work

OpenPype uses its own window widget inside Resolve; for that reason PySide2 has to be installed into the Python 3.6 interpreter (as explained above).

<Tabs
groupId="platforms"
defaultValue="win"
values={[
{label: 'Windows', value: 'win'},
{label: 'Linux', value: 'linux'},
{label: 'Mac', value: 'mac'},
]}>

<TabItem value="win">

Paste into any terminal of your choice:

```bash
%LOCALAPPDATA%\Programs\Python\Python36\python.exe -m pip install PySide2
```

</TabItem>
<TabItem value="linux">

Paste into any terminal of your choice:

```bash
/opt/Python/3.6/bin/python -m pip install PySide2
```

</TabItem>
<TabItem value="mac">

Paste into any terminal of your choice:

```bash
~/Library/Python/3.6/bin/python -m pip install PySide2
```

</TabItem>
</Tabs>

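To verify that the installation succeeded, you can run a short check with the same interpreter Resolve will use; a minimal sketch, assuming you invoke it with the Python 3.6 executable from the commands above:

```python
# Run this with Resolve's Python 3.6 interpreter; it should print a
# 5.x version string if PySide2 is importable.
import PySide2
from PySide2 import QtWidgets  # noqa: F401 - import check only

print(PySide2.__version__)
```
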
<div class="row markdown">

### Set Resolve's Fusion settings for Python 3.6 interpreter

<div class="col col--6 markdown">

As shown in the picture below, go to the Fusion tab, then open Fusion Settings from the Fusion menu. Go to Fusion/Script, find Default Python Version, and switch it to Python 3.6.

</div>

<div class="col col--6 markdown">



</div>
</div>

@@ -715,3 +715,40 @@ Once data are marked as Redshift Proxy instance, they can be published - **OpenP

Published proxy files can be loaded with the OpenPype Loader. It will create a mesh and attach Redshift Proxy
parameters to it - Redshift will then represent the proxy with a bounding box.

## Using VRay Proxies

OpenPype supports publishing, loading and using VRay Proxies in look management. Their underlying format
can be either vrmesh or Alembic.

:::warning vrmesh or alembic and look management
Be aware that **vrmesh** cannot be used with looks as it doesn't retain the IDs necessary to map shaders to geometry.
:::

### Creating VRay Proxy

To create a VRay Proxy, select the geometry you want and - **OpenPype → Create ...** - select **VRay Proxy**. Name your
subset as you want and press the **Create** button.

This will create a `vrayproxy` set for your subset, shown in the image below. You can set some options in the Attribute Editor, mainly whether you want
to export animation instead of a single frame.

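For the curious, the `vrayproxy` set is an ordinary named Maya object set; a rough `maya.cmds` sketch of creating one by hand follows - the set name is hypothetical and the real Creator attaches extra publishing attributes:

```python
from maya import cmds

# Grab the currently selected geometry and wrap it in a named set,
# roughly what the VRay Proxy creator does under the hood.
# "vrayproxyMain" is a hypothetical name.
selection = cmds.ls(selection=True, long=True)
proxy_set = cmds.sets(selection, name="vrayproxyMain")
print(proxy_set)
```
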

|
||||
|
||||
### Publishing VRay Proxies
|
||||
|
||||
VRay Proxy can be published - **OpenPype → Publish ...**. It will publish data as VRays `vrmesh` format and as
|
||||
Alembic file.
|
||||
|
||||
## Using VRay Proxies
|
||||
|
||||
You can load VRay Proxy using loader - **OpenPype → Loader ...**
|
||||
|
||||

|
||||
|
||||
Select your subset and right-click. Select **Import VRay Proxy (vrmesh)** to import it.
|
||||
|
||||
:::note
|
||||
Note that even if it states `vrmesh` in descriptions, if loader finds Alembic published along (default behavior) it will
|
||||
use abc file instead of vrmesh as it is more flexible and without it looks doesn't work.
|
||||
:::
|
||||
|
|
|
|||
216
website/docs/artist_hosts_resolve.md
Normal file

@@ -0,0 +1,216 @@
---
id: artist_hosts_resolve
title: DaVinci Resolve
sidebar_label: DaVinci Resolve
---


import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

:::warning
Before you are able to start with OpenPype tools in DaVinci Resolve, its own Python 3.6 interpreter and PySide2 have to be installed. See [Installation of python and pyside](#installation-of-python-and-pyside) for more information.
:::



## OpenPype global tools

- [Work Files](artist_tools.md#workfiles)
- [Create](artist_tools.md#creator)
- [Load](artist_tools.md#loader)
- [Manage (Inventory)](artist_tools.md#inventory)
- [Publish](artist_tools.md#publisher)

<div class="row markdown">

## Creating Shots from timeline items

Before a clip can be published with the [Publisher](artist_tools.md#publisher), the timeline item has to be marked with OpenPype metadata markers. This way it is converted to a publishable subset.

Let's do it step by step.

</div>


<div class="row markdown">

### Color clips before opening Create menu


Timeline video clips should be colored with the `Chocolate` color for OpenPype to see them as selected for subset creation.


<div class="col col--6 markdown">



</div>
</div>

### Rename timeline track names

<div class="row markdown">


<div class="col col--6 markdown">

To be able to work with dynamic subset names, which are based on track names, it is recommended to rename the tracks after the type of plates their clips represent. Commonly used names are `main`, `review`, `fg01`, `fg02`, `bg`, `bg01`, etc. It is completely up to you, but we recommend always having at least a `main` plate. For example, if a clip is on track **element** and the subset family is set to **plate**, the resulting subset name will be **plateElement**.

<br></br>
</div>

<div class="col col--6 markdown">


The name of the resulting *subset* can be seen in the **OpenPypeData** marker.
<br></br><br></br>
</div>

<div class="col col--6 markdown">


A simple track setup where we are only using the `main` and `review` track names.

</div>
<div class="col col--6 markdown">


An example of used track names. The yellow frame highlights vertically aligned clips - which are going to be renamed and grouped together under one asset (shot) name. The concept of vertical renaming is explained later in [Vertical Synchronization of Subset Attributes](#vertical-synchronization-of-subset-attributes).

</div>
</div>

### Create menu...

<div class="row markdown">
<div class="col col--6 markdown">

After all clips which are intended to be converted to publishable instances are colored with the `Chocolate` color, you can open the OpenPype menu.

</div>
<div class="col col--6 markdown">



</div>

</div>

<div class="row markdown">
<div class="col col--6 markdown">

After the menu widget is opened (it can take a while, so please be patient :).

Hit `Create ...`, set **Use selection** to active and set the family to **Create Publishable Clips**.

The subset name can stay as it is; it is not going to be used, because each clip will generate its own name.

</div>
<div class="col col--6 markdown">



</div>
</div>

<div class="row markdown">
<div class="col col--6 markdown">

The new window that opens lets you define various attributes for your future subsets and shots.

Set **Rename clips** to active if you wish to use different shot names in the pipeline than the original clip names conformed from EDL/XML.

**Count sequence from** - start of the shot numbering if `#` is used in one of the keywords

**Stepping number** - sequential gaps in the numbering

As you can see in the `{shot}` key within the *Shot Template Keywords* section, you can use the `#` symbol to define the padding of the sequence number and where it is going to be used.

</div>
<div class="col col--6 markdown">



</div>
</div>

<div class="row markdown">
<div class="col col--6 markdown">

Notice the relationship between the following sections. Keys from the **Shot Template Keywords** section are used for formatting the templates in the **Shot Hierarchy And Rename Settings** section.

**Shot parent hierarchy** forms the parents of the asset (shot); *the hidden root for this is the project folder*. So for this example template we will get the resulting string `shots/sq01`.

**Clip name template**, in the context of a clip sitting on a track named `main` in second position, resolves to `mainsq01sh020`. This is because the track key hosts `{_track_}`, which inherits its name from the timeline track name. The other allowed namespaces are listed below, and a small formatting sketch follows after the list:
- `{_sequence_}`: timeline name
- `{_clip_}`: clip name
- `{_trackIndex_}`: position of the track on the timeline from the bottom
- `{_clipIndex_}`: clip position on the timeline from the left

</div>
<div class="col col--6 markdown">



</div>
</div>

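A small Python sketch of how these keys resolve into the final clip name; the template string and key values are illustrative assumptions, the real ones come from the Create dialog:

```python
# Hypothetical template and key values mirroring the example above,
# where a clip on track "main" resolves to "mainsq01sh020".
template = "{_track_}{_sequence_}{shot}"
data = {
    "_track_": "main",     # timeline track name
    "_sequence_": "sq01",  # timeline name
    "shot": "sh020",       # shot key with '#' padding already applied
}
print(template.format(**data))  # mainsq01sh020
```
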
### Vertical synchronization of subset attributes

In case you are only working with two tracks on the timeline, where the `main` track is used as plates for compositors and the `review` track holds mp4 clips for offlines and web previews, **Enable vertical sync** can be deactivated.

In a multiple-track scenario - as mentioned [here](#rename-timeline-track-names) - it is recommended to activate **Enable vertical sync** and set the hero (driving) track to *main*. This ensures that all of the clips corresponding to the same shot have the same publishing parameters.

<br></br>

<div class="row markdown">

<div class="col col--6 markdown">



</div>

<div class="col col--6 markdown">



</div>
</div>


## Publishing Shots

<div class="row markdown">
<div class="col col--6 markdown">

Once all `Chocolate` colored clips have gone through the [creator](#create-menu), have been colored `Pink` and a marker has been created for each of them, it means they have been successfully converted to publishable clips. Now we can run the **Publisher** - its button can be found in the OpenPype menu.

<br></br>
</div>

<div class="row markdown">
<div class="col col--6 markdown">


Notice that the main track clips and the review clips have been merged into one instance. And since it is the main `hero` clip, it also holds all the new shot metadata. For that reason it also creates a second instance with the `shot` family for each of them. This instance will create the whole shot hierarchy and pass the frame range attributes to the shot (asset).

</div>
</div>

<div class="row markdown">
<div class="col col--6 markdown">


Also notice how the subset name is formed from a *track* name and the *subset family* from the previous steps.

It is also important to notice the asset name in *OpenPypeData* on the marker - the name is the same for all **vertically renamed** shots, as they have been grouped together. Unfortunately, Resolve does not allow renaming the clips, so the only way to know is to see it in the marker's metadata.

</div>
</div>

</div>

BIN
website/docs/assets/maya-vray_proxy-loader.jpg
Normal file
BIN
website/docs/assets/maya-vray_proxy.jpg
Normal file
BIN
website/docs/assets/resolve_create_audio_resolution.png
Normal file
BIN
website/docs/assets/resolve_create_clips.png
Normal file
BIN
website/docs/assets/resolve_create_object_naming_convention.png
Normal file
BIN
website/docs/assets/resolve_create_renaming_clips.png
Normal file
BIN
website/docs/assets/resolve_create_subset_name_review_track.png
Normal file