Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 05:14:40 +01:00
aport collect instances update, filter out "clip" family from global plugins, fix extract_post_json, add some supporting functions to pype.lib, templates and api
This commit is contained in:
parent 5693f8a4d7
commit 95c9c2bc9b
15 changed files with 219 additions and 59 deletions
11  pype/api.py

@@ -39,7 +39,13 @@ from .templates import (
    set_project_code
)
from .lib import modified_environ, add_tool_to_environment
from .lib import (
    get_project_data,
    get_asset_data,
    modified_environ,
    add_tool_to_environment,
    get_data_hierarchical_attr
)

from .widgets.message_window import message

@@ -63,6 +69,8 @@ __all__ = [
    "reset_data_from_templates",

    # get contextual data
    "get_project_data",
    "get_asset_data",
    "get_project_name",
    "get_project_code",
    "get_hierarchy",

@@ -75,6 +83,7 @@ __all__ = [
    "add_tool_to_environment",
    "set_hierarchy",
    "set_project_code",
    "get_data_hierarchical_attr",

    # preloaded templates
    "Anatomy",
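A minimal usage sketch (not part of the commit) of the helpers pype.api now re-exports; it assumes an active avalon session and database connection, and the asset name "shot02" is only illustrative, borrowed from the README example further down.

# Illustrative only: assumes avalon has an active session and db connection.
from avalon import io
from pype.api import get_asset_data, get_data_hierarchical_attr

# "data" dict of the current AVALON_ASSET (or of an explicitly named asset)
asset_data = get_asset_data()

# full asset document, as expected by get_data_hierarchical_attr
entity = io.find_one({"name": "shot02", "type": "asset"})

# climbs the visualParent chain until "fstart" is found (or returns None)
fstart = get_data_hierarchical_attr(entity, "fstart")
print(asset_data, fstart)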
@@ -70,7 +70,7 @@ def publish(json_data_path, staging_dir=None):
    forward([
        sys.executable, "-u"
    ] + args,
        cwd=os.getenv('PYPE_SETUP_ROOT')
        cwd=os.getenv('AVALON_WORKDIR').replace("\\", "/")
    )

    return {"return_json_path": return_json_path}

@@ -88,9 +88,12 @@ def context(project, asset, task, app):
    pype.set_project_code(project_code)
    hierarchy = pype.get_hierarchy()
    pype.set_hierarchy(hierarchy)
    fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
                 if isinstance(v, str)}
    SESSION.update(fix_paths)
    SESSION.update({"AVALON_HIERARCHY": hierarchy,
                    "AVALON_PROJECTCODE": project_code,
                    "current_dir": os.getcwd()
                    "current_dir": os.getcwd().replace("\\", "/")
                    })

    return SESSION

@@ -126,5 +129,12 @@ def register_plugin_path(publish_path):
    )


@pico.expose()
def nuke_test():
    import nuke
    n = nuke.createNode("Constant")
    log.info(n)


app = PicoApp()
app.register_module(__name__)
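The fix_paths pattern above (also reused by the aport collect plugin later in this commit) normalizes Windows backslashes in every string session value; a self-contained sketch with illustrative values:

# Minimal sketch of the session path normalization; the SESSION values here are
# invented for illustration, not taken from a real avalon session.
SESSION = {
    "AVALON_PROJECT": "jakub_projectx",
    "AVALON_WORKDIR": "C:\\Users\\hubert\\_PYPE_testing\\work",
    "AVALON_TASK": "rotopaint",
}

fix_paths = {k: v.replace("\\", "/") for k, v in SESSION.items()
             if isinstance(v, str)}
SESSION.update(fix_paths)

print(SESSION["AVALON_WORKDIR"])   # C:/Users/hubert/_PYPE_testing/work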
19  pype/lib.py

@@ -378,16 +378,31 @@ def get_asset_data(asset=None):
    Returns:
        dict
    """

    asset_name = asset or avalon.api.Session["AVALON_ASSET"]
    document = io.find_one({"name": asset_name,
                            "type": "asset"})

    data = document.get("data", {})

    return data


def get_data_hierarchical_attr(entity, attr_name):
    vp_attr = 'visualParent'
    data = entity['data']
    value = data.get(attr_name, None)
    if value is not None:
        return value
    elif vp_attr in data:
        if data[vp_attr] is None:
            parent_id = entity['parent']
        else:
            parent_id = data[vp_attr]
        parent = io.find_one({"_id": parent_id})
        return get_data_hierarchical_attr(parent, attr_name)
    else:
        return None


def get_avalon_project_config_schema():
    schema = 'avalon-core:config-1.0'
    return schema
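To make the visualParent walk in get_data_hierarchical_attr concrete, here is a standalone sketch with an in-memory stand-in for io.find_one; the document shapes are assumptions modelled on the avalon-style entities the function reads:

# Standalone illustration of the hierarchical lookup above, using an in-memory
# "database" instead of avalon's io.find_one. Shapes mirror the code:
# data["visualParent"] points at a parent _id, entity["parent"] is the fallback.
DB = {
    "proj": {"_id": "proj", "parent": None,
             "data": {"fstart": 1001, "handles": 10}},
    "seq":  {"_id": "seq", "parent": "proj",
             "data": {"visualParent": None}},        # falls back to entity["parent"]
    "shot": {"_id": "shot", "parent": "seq",
             "data": {"visualParent": "seq"}},
}

def find_one(query):
    return DB.get(query["_id"])

def get_data_hierarchical_attr(entity, attr_name):
    data = entity["data"]
    value = data.get(attr_name)
    if value is not None:
        return value
    if "visualParent" in data:
        parent_id = entity["parent"] if data["visualParent"] is None else data["visualParent"]
        return get_data_hierarchical_attr(find_one({"_id": parent_id}), attr_name)
    return None

print(get_data_hierarchical_attr(DB["shot"], "fstart"))   # 1001, inherited from "proj"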
3  pype/nuke/test_atom_server.py (Normal file)

@@ -0,0 +1,3 @@
import nuke
n = nuke.createNode("Constant")
print(n)
@@ -4,6 +4,7 @@ from avalon import (
    io,
    api as avalon
)
from pype import api as pype
import json


@@ -20,20 +21,35 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
    """

    label = "Collect Aport Context"
    order = pyblish.api.CollectorOrder - 0.1
    order = pyblish.api.CollectorOrder - 0.49

    def process(self, context):
        context.data["avalonSession"] = session = avalon.session

        # get json paths from data
        rqst_json_data_path = context.data['rqst_json_data_path']
        post_json_data_path = context.data['post_json_data_path']

        context.data["stagingDir"] = \
            staging_dir = os.path.dirname(post_json_data_path)
        # get avalon session data and convert \ to /
        session = avalon.session
        fix_paths = {k: v.replace("\\", "/") for k, v in session.items()
                     if isinstance(v, str)}
        session.update(fix_paths)
        context.data["avalonSession"] = session

        # get staging directory from received path to json
        context.data["stagingDir"] = \
            staging_dir = os.path.dirname(
                post_json_data_path).replace("\\", "/")

        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)

        # get data from received json file
        with open(rqst_json_data_path) as f:
            context.data['json_data'] = json_data = json.load(f)
        assert json_data, "No `data` in json file"

        # get and check host type
        host = json_data.get("host", None)
        host_version = json_data.get("hostVersion", None)
        assert host, "No `host` data in json file"

@@ -42,25 +58,49 @@ class CollectContextDataFromAport(pyblish.api.ContextPlugin):
        context.data["hostVersion"] = \
            session["AVALON_APP_VERSION"] = host_version

        # register pyblish for filtering of hosts in plugins
        pyblish.api.deregister_all_hosts()
        pyblish.api.register_host(host)

        # get path to studio templates
        templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
        assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"

        # get presets for host
        presets_dir = os.path.join(templates_dir, "presets", host)
        assert os.path.exists(presets_dir), "Required path `{}` doesn't exist".format(presets_dir)

        # load all available preset json files
        preset_data = dict()
        for file in os.listdir(presets_dir):
            name, ext = os.path.splitext(file)
            with open(os.path.join(presets_dir, file)) as prst:
                preset_data[name] = json.load(prst)

        context.data['presets'] = preset_data
        assert preset_data, "No `presets` data in json file"
        self.log.debug("preset_data: {}".format(preset_data))

        # get current file
        current_file = json_data.get("currentFile", None)
        assert current_file, "No `currentFile` data in json file"
        context.data["currentFile"] = current_file

        presets = json_data.get("presets", None)
        assert presets, "No `presets` data in json file"
        context.data["presets"] = presets
        # get project data from avalon
        project_data = pype.get_project_data()
        assert project_data, "No `project_data` data in avalon db"
        context.data["projectData"] = project_data
        self.log.debug("project_data: {}".format(project_data))

        if not os.path.exists(staging_dir):
            os.makedirs(staging_dir)

        self.log.info("Context.data are: {}".format(
            context.data))
        # get asset data from avalon and fix all paths
        asset_data = pype.get_asset_data()
        assert asset_data, "No `asset_data` data in avalon db"
        asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
                      if isinstance(v, str)}
        context.data["assetData"] = asset_data

        self.log.debug("asset_data: {}".format(asset_data))
        self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))

        self.log.info("post_json_data_path is: {}".format(post_json_data_path))

        self.log.info("avalon.session is: {}".format(avalon.session))
        # self.log.info("avalon.session is: {}".format(avalon.session))
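A standalone sketch of the preset loading step above: every JSON file under $PYPE_STUDIO_TEMPLATES/presets/<host> becomes one key of the presets dict. The directory layout and the example host name are assumptions based on the os.path.join call and the README added later in this commit:

# Sketch of the preset discovery done by the collector; paths are illustrative.
import json
import os

def load_presets(templates_dir, host):
    presets_dir = os.path.join(templates_dir, "presets", host)
    preset_data = {}
    for fname in os.listdir(presets_dir):
        name, _ext = os.path.splitext(fname)
        with open(os.path.join(presets_dir, fname)) as prst:
            preset_data[name] = json.load(prst)
    return preset_data

# e.g. load_presets(os.environ["PYPE_STUDIO_TEMPLATES"], "premiere") would be
# expected to yield keys like "rules_tasks" and "asset_default", matching what
# the CollectInstancesFromJson plugin reads later in this commit.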
@@ -6,6 +6,8 @@ from avalon import (
    api as avalon
)

from pype import api as pype


class CollectInstancesFromJson(pyblish.api.ContextPlugin):
    """

@@ -20,17 +22,84 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
    """

    label = "Collect instances from JSON"
    order = pyblish.api.CollectorOrder - 0.05
    order = pyblish.api.CollectorOrder - 0.48

    def process(self, context):
        a_session = context.data.get("avalonSession")
        json_data = context.data.get("json_data", None)
        assert json_data, "No `json_data` data in json file"

        instances_data = json_data.get("instances", None)
        assert instances_data, "No `instance` data in json file"

        presets = context.data["presets"]
        rules_tasks = presets["rules_tasks"]

        asset_default = presets["asset_default"]
        assert instances_data, "No `asset_default` data in json file"

        asset_name = a_session["AVALON_ASSET"]
        entity = io.find_one({"name": asset_name,
                              "type": "asset"})

        # get frame start > first try from asset data
        frame_start = context.data["assetData"].get("fstart", None)
        if not frame_start:
            self.log.debug("frame_start not on assetData")
            # get frame start > second try from parent data
            frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
            if not frame_start:
                self.log.debug("frame_start not on any parent entity")
                # get frame start > third try from preset defaults
                frame_start = asset_default["fstart"]

        assert frame_start, "No `frame_start` data found, "
        "please set `fstart` on asset"
        self.log.debug("frame_start: `{}`".format(frame_start))

        # get handles > first try from asset data
        handles = context.data["assetData"].get("handles", None)
        if not handles:
            # get handles > second try from parent data
            handles = pype.get_data_hierarchical_attr(entity, "handles")
            if not handles:
                # get handles > third try from preset defaults
                handles = asset_default["handles"]

        assert handles, "No `handles` data found, "
        "please set `fstart` on asset"
        self.log.debug("handles: `{}`".format(handles))

        instances = []

        task = a_session["AVALON_TASK"]
        current_file = os.path.basename(context.data.get("currentFile"))
        name, ext = os.path.splitext(current_file)

        # get current file host
        host = a_session["AVALON_APP"]
        family = "workfile"
        families = "filesave"
        subset_name = "{0}_{1}".format(task, family)
        # Set label
        label = "{0} - {1} > {2}".format(name, task, families)

        # get working file into instance for publishing
        instance = context.create_instance(subset_name)
        instance.data.update({
            "subset": subset_name,
            "task": task,
            "representation": ext[1:],
            "host": host,
            "asset": asset_name,
            "label": label,
            "name": name,
            "family": family,
            "families": [families],
            "publish": True,
        })
        instances.append(instance)

        for inst in instances_data:
            # for key, value in inst.items():
            #     self.log.debug('instance[key]: {}'.format(key))

@@ -60,6 +129,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
            instance.data.update({
                "subset": subset_name,
                "task": task,
                "fstart": frame_start,
                "handles": handles,
                "host": host,
                "asset": asset,
                "label": "{0} - {1} > {2}".format(name, task, subset),

@@ -67,6 +138,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
                "family": inst["family"],
                "families": [subset],
                "jsonData": inst,
                "parents": ,  # without task
                "hierarchy": ,
                "publish": True,
            })
            self.log.info("collected instance: {}".format(instance.data))
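The fstart/handles resolution above follows a three-step fallback (instance asset data, then the visualParent chain, then the studio asset_default preset); a hedged helper-style sketch of the same chain, not taken from the commit:

# Hypothetical helper mirroring the fallback order used by the collector.
def resolve_attr(attr, asset_data, entity, asset_default, lookup):
    """Return attr from the asset, else from its parents, else from presets."""
    value = asset_data.get(attr)
    if not value:
        value = lookup(entity, attr)      # e.g. pype.get_data_hierarchical_attr
    if not value:
        value = asset_default[attr]       # studio preset fallback
    assert value, "please set `{}` on the asset".format(attr)
    return value

# usage (names are illustrative):
# frame_start = resolve_attr("fstart", context.data["assetData"], entity,
#                            presets["asset_default"],
#                            pype.get_data_hierarchical_attr)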
@@ -1,3 +1,4 @@
import json
import clique
import pyblish.api

@@ -18,21 +19,21 @@ class ExtractJSON(pyblish.api.ContextPlugin):
        instances_data = []
        for instance in context:

            data = {}
            iData = {}
            for key, value in instance.data.items():
                if isinstance(value, clique.Collection):
                    value = value.format()

                try:
                    json.dumps(value)
                    data[key] = value
                    iData[key] = value
                except KeyError:
                    msg = "\"{0}\"".format(value)
                    msg += " in instance.data[\"{0}\"]".format(key)
                    msg += " could not be serialized."
                    self.log.debug(msg)

            instances_data.append(data)
            instances_data.append(iData)

        data["instances"] = instances_data

@@ -57,11 +58,14 @@ class ExtractJSON(pyblish.api.ContextPlugin):
                pass
            return value

        if isinstance(data, object):
            data = dict(data)
            # self.log.info("1: {}".format(data))

        if not isinstance(data, dict):
            # self.log.info("2: {}".format(data))
            return data

        for key, value in data.items():
            if "records" in key:
            if key in ["records", "instances", "results"]:
                # escape all record objects
                data[key] = None
                continue

@@ -69,7 +73,7 @@ class ExtractJSON(pyblish.api.ContextPlugin):
            if hasattr(value, '__module__'):
                # only deals with module objects
                if "plugins" in value.__module__:
                    # only dealing with plugin objects
                    # only dealing with plugin objects
                    data[key] = str(value.__module__)
                else:
                    if ".lib." in value.__module__:
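A standalone sketch of the "keep only JSON-serializable values" probe used in the instance loop above. Note that json.dumps raises TypeError for unserializable objects, while the plugin as committed catches KeyError, so unserializable values would still propagate there:

# Illustration (not from the commit) of the serializability filter.
import json

def serializable_subset(mapping, log=print):
    out = {}
    for key, value in mapping.items():
        try:
            json.dumps(value)
            out[key] = value
        except (TypeError, ValueError):
            log('"{0}" in data["{1}"] could not be serialized.'.format(value, key))
    return out

print(serializable_subset({"fstart": 1001, "plugin": object()}))
# -> {'fstart': 1001}; the object() value is dropped with a log message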
@@ -13,9 +13,12 @@ class CleanUp(pyblish.api.InstancePlugin):

    order = pyblish.api.IntegratorOrder + 10
    label = "Clean Up"
    exclude_families = ["clip"]

    def process(self, instance):

        if [ef for ef in self.exclude_families
                if instance.data["family"] in ef]:
            return
        import tempfile

        staging_dir = instance.data.get("stagingDir", None)
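The same exclude_families guard is added to CollectAssumedDestination, IntegrateAsset and IntegrateFrames below. A compact illustration (not from the commit) of what the list comprehension evaluates: it is truthy whenever the instance family is a substring of any excluded family, which for the single whole-word entry "clip" behaves like a plain membership test:

# Equivalent guard, written with any() for readability; exclude_families and the
# instance dicts are illustrative.
exclude_families = ["clip"]

def should_skip(instance_data):
    return any(instance_data["family"] in ef for ef in exclude_families)

print(should_skip({"family": "clip"}))        # True  -> plugin returns early
print(should_skip({"family": "workfile"}))    # False -> plugin processes instance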
@@ -9,8 +9,12 @@ class CollectAssumedDestination(pyblish.api.InstancePlugin):

    label = "Collect Assumed Destination"
    order = pyblish.api.CollectorOrder + 0.499
    exclude_families = ["clip"]

    def process(self, instance):
        if [ef for ef in self.exclude_families
                if instance.data["family"] in ef]:
            return

        self.create_destination_template(instance)
@@ -39,8 +39,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                "review",
                "scene",
                "ass"]
    exclude_families = ["clip"]

    def process(self, instance):
        if [ef for ef in self.exclude_families
                if instance.data["family"] in ef]:
            return

        self.register(instance)
@@ -27,8 +27,12 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
    families = ["imagesequence", "render", "write", "source"]

    family_targets = [".frames", ".local", ".review", "imagesequence", "render"]
    exclude_families = ["clip"]

    def process(self, instance):
        if [ef for ef in self.exclude_families
                if instance.data["family"] in ef]:
            return

        families = [f for f in instance.data["families"]
                    for search in self.family_targets

@@ -244,17 +248,17 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
            # Imprint shortcut to context
            # for performance reasons.
            "context": {
                "root": root,
                "project": PROJECT,
                "projectcode": project['data']['code'],
                'task': api.Session["AVALON_TASK"],
                "silo": asset['silo'],
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "VERSION": version["name"],
                "hierarchy": hierarchy,
                "representation": ext[1:]
                "root": root,
                "project": PROJECT,
                "projectcode": project['data']['code'],
                'task': api.Session["AVALON_TASK"],
                "silo": asset['silo'],
                "asset": ASSET,
                "family": instance.data['family'],
                "subset": subset["name"],
                "VERSION": version["name"],
                "hierarchy": hierarchy,
                "representation": ext[1:]
            }
        }
@@ -8,5 +8,5 @@ class CollectContextDataPremiera(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder + 0.1

    def process(self, context):
        data_path = context.data['json_context_data_path']
        data_path = context.data['rqst_json_data_path']
        self.log.info("Context is: {}".format(data_path))
6  pype/premiere/README.markdown (Normal file)

@@ -0,0 +1,6 @@
## How to
1. start the aport server
   a. the plugin path can be deregistered via http://localhost:4242/pipeline/deregister_plugin_path
2. set aport into the correct context: http://localhost:4242/pipeline/context?project=jakub_projectx&asset=shot02&task=rotopaint&app=premiera
3. register the premiere publish plugin path: http://localhost:4242/pipeline/register_plugin_path?publish_path=C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/plugins/premiere/publish
4. publish with the test json file: http://localhost:4242/pipeline/publish?json_data_path=C:/Users/hubert/CODE/pype-setup/repos/pype-config/pype/premiere/example_publish_reqst.json
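A sketch of walking these four steps from Python instead of a browser; requests is an assumed client library, not part of this commit, and the URLs are taken verbatim from the list above, assuming the aport server is already running:

# Hypothetical client-side walkthrough of the README steps.
import requests

base = "http://localhost:4242/pipeline"

requests.get(base + "/deregister_plugin_path")                    # optional step 1a
requests.get(base + "/context", params={
    "project": "jakub_projectx", "asset": "shot02",
    "task": "rotopaint", "app": "premiera"})                      # step 2
requests.get(base + "/register_plugin_path", params={
    "publish_path": "C:/Users/hubert/CODE/pype-setup/repos/"
                    "pype-config/pype/plugins/premiere/publish"})  # step 3
resp = requests.get(base + "/publish", params={
    "json_data_path": "C:/Users/hubert/CODE/pype-setup/repos/"
                      "pype-config/pype/premiere/example_publish_reqst.json"})  # step 4
print(resp.text)   # publish() returns {"return_json_path": ...} per the pipeline hunk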
@@ -7,29 +7,14 @@
    "framerate": "24.0",
    "host": "premiere",
    "hostVersion": "2019",
    "startFrame": 1001,
    "isRenderedReference": true,
    "referenceFile": "C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\resources\\reference\\e01\\sequence01\\conform\\jkprx_e01_conform_v001.mov",
    "presets": {"rules_tasks": {
        "defaultTasks": ["compositing"],
        "taskHost": {
            "compositing": "nuke",
            "3d": "maya",
            "roto": "nukeAssist"
        },
        "taskSubsets": {
            "compositing": ["nukescript", "read", "write"],
            "3d": ["scene", "camera", "imageplane"],
            "roto": ["nukescript", "read", "write"]
        }
    }
    },
    "referenceFile": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/reference/e01/sequence01/conform/jkprx_e01_conform_v001.mov",
    "instances": [
        {
            "publish": true,
            "family": "clip",
            "name": "e01_s010_0010",
            "filePath": "C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\resources\\footage\\raw\\day01\\bbt_test_001_raw.mov",
            "filePath": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/footage/raw/day01/bbt_test_001_raw.mov",
            "tags": [
                {"task": "compositing"},
                {"task": "roto"},

@@ -75,7 +60,7 @@
            "publish": true,
            "family": "clip",
            "name": "e01_s010_0020",
            "filePath": "C:\\Users\\hubert\\_PYPE_testing\\projects\\jakub_projectx\\resources\\footage\\raw\\day01\\bbt_test_001_raw.mov",
            "filePath": "C:/Users/hubert/_PYPE_testing/projects/jakub_projectx/resources/footage/raw/day01/bbt_test_001_raw.mov",
            "tags": [
                {"task": "compositing"},
                {"task": "roto"},
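A hedged illustration of how the rules_tasks preset and the clip tags above could be combined; the committed per-clip collector loop is truncated in this diff, so the pairing of tag tasks with taskHost/taskSubsets here is an assumption about intended use, not the plugin's actual code:

# Reads a trimmed-down version of the example request above and pairs each clip's
# tag tasks with the host and subsets defined in rules_tasks.
import json

json_data = json.loads("""{
  "presets": {"rules_tasks": {
      "defaultTasks": ["compositing"],
      "taskHost": {"compositing": "nuke", "3d": "maya", "roto": "nukeAssist"},
      "taskSubsets": {"compositing": ["nukescript", "read", "write"],
                      "3d": ["scene", "camera", "imageplane"],
                      "roto": ["nukescript", "read", "write"]}}},
  "instances": [{"name": "e01_s010_0010", "family": "clip",
                 "tags": [{"task": "compositing"}, {"task": "roto"}]}]
}""")

rules = json_data["presets"]["rules_tasks"]
for inst in json_data["instances"]:
    tasks = [t["task"] for t in inst.get("tags", [])] or rules["defaultTasks"]
    for task in tasks:
        print(inst["name"], task, rules["taskHost"][task], rules["taskSubsets"][task])
# e01_s010_0010 compositing nuke ['nukescript', 'read', 'write']
# e01_s010_0010 roto nukeAssist ['nukescript', 'read', 'write']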
@@ -132,9 +132,9 @@ def get_asset():
    Raises:
        log: error
    """

    asset = SESSION.get("AVALON_ASSET", None) \
        or os.getenv("AVALON_ASSET", None)
    log.info("asset: {}".format(asset))
    assert asset, log.error("missing `AVALON_ASSET`"
                            "in avalon session "
                            "or os.environ!")