Mirror of https://github.com/ynput/ayon-core.git, synced 2026-01-01 16:34:53 +01:00
Merge branch 'develop' into feature/PYPE-531_avalon_upstream_and_silos

# Conflicts:
#   pype/ftrack/lib/avalon_sync.py

Commit 067a7c94cb
3330 changed files with 19071 additions and 301680 deletions
pype/plugins/global/publish/collect_anatomy.py (new file, 20 lines added)

@@ -0,0 +1,20 @@
"""
Requires:
    None
Provides:
    context -> anatomy (pypeapp.Anatomy)
"""

from pypeapp import Anatomy
import pyblish.api


class CollectAnatomy(pyblish.api.ContextPlugin):
    """Collect Anatomy into Context"""

    order = pyblish.api.CollectorOrder
    label = "Collect Anatomy"

    def process(self, context):
        context.data['anatomy'] = Anatomy()
        self.log.info("Anatomy templates collected...")
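A collector like CollectAnatomy above is not run on its own; pyblish discovers it from a registered plugin path and runs it during a publish session. A minimal sketch of that flow using the public pyblish-base API (the plugin directory path below is illustrative, not the real deployment layout):

import pyblish.api
import pyblish.util

# Register the folder holding collect_anatomy.py and the other publish plugins.
pyblish.api.register_plugin_path("/studio/pype/plugins/global/publish")

# Collectors run first (CollectorOrder), so after publish() the context
# already carries the Anatomy object for later validators and integrators.
context = pyblish.util.publish()
print(context.data.get("anatomy"))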
@@ -1,3 +1,10 @@
"""
Requires:
    None
Provides:
    context -> comment (str)
"""

import pyblish.api
@@ -1,76 +0,0 @@
import os
import pyblish.api
from avalon import io
import json
import logging
import clique

log = logging.getLogger("collector")


class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
    """
    Collecting temp json data sent from a host context
    and path for returning json data back to hostself.
    """

    label = "Collect Context - SA Publish"
    order = pyblish.api.CollectorOrder - 0.49
    hosts = ["standalonepublisher"]

    def process(self, context):
        # get json paths from os and load them
        io.install()
        input_json_path = os.environ.get("SAPUBLISH_INPATH")
        output_json_path = os.environ.get("SAPUBLISH_OUTPATH")

        # context.data["stagingDir"] = os.path.dirname(input_json_path)
        context.data["returnJsonPath"] = output_json_path

        with open(input_json_path, "r") as f:
            in_data = json.load(f)

        asset_name = in_data['asset']
        family = in_data['family']
        subset = in_data['subset']

        project = io.find_one({'type': 'project'})
        asset = io.find_one({
            'type': 'asset',
            'name': asset_name
        })
        context.data['project'] = project
        context.data['asset'] = asset

        instance = context.create_instance(subset)

        instance.data.update({
            "subset": subset,
            "asset": asset_name,
            "label": subset,
            "name": subset,
            "family": family,
            "families": [family, 'ftrack'],
        })
        self.log.info("collected instance: {}".format(instance.data))
        self.log.info("parsing data: {}".format(in_data))

        instance.data['destination_list'] = list()
        instance.data['representations'] = list()
        instance.data['source'] = 'standalone publisher'

        for component in in_data['representations']:

            component['destination'] = component['files']
            component['stagingDir'] = component['stagingDir']
            component['anatomy_template'] = 'render'
            if isinstance(component['files'], list):
                collections, remainder = clique.assemble(component['files'])
                self.log.debug("collecting sequence: {}".format(collections))
                instance.data["frameStart"] = int(component["frameStart"])
                instance.data["frameEnd"] = int(component["frameEnd"])
                instance.data['fps'] = int(component['fps'])

            instance.data["representations"].append(component)

        self.log.info(in_data)
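The removed collector implies a specific shape for the JSON file pointed to by SAPUBLISH_INPATH. A hedged reconstruction of that payload, inferred only from the keys the plugin reads; every value below is invented for illustration:

# Keys mirror the reads in CollectContextDataSAPublish; values are examples.
example_in_data = {
    "asset": "sh010",
    "family": "render",
    "subset": "renderMain",
    "representations": [
        {
            "files": ["sh010_main.1001.exr", "sh010_main.1002.exr"],
            "stagingDir": "/tmp/sapublish_staging",
            "frameStart": 1001,
            "frameEnd": 1002,
            "fps": 25
        }
    ]
}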
@@ -1,3 +1,10 @@
"""
Requires:
    context -> currentFile (str)
Provides:
    context -> label (str)
"""

import os
import pyblish.api

@@ -19,4 +26,6 @@ class CollectContextLabel(pyblish.api.ContextPlugin):

        # Set label
        label = "{host} - {scene}".format(host=host.title(), scene=base)
        if host == "standalonepublisher":
            label = host.title()
        context.data["label"] = label
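The new branch above changes only the standalone publisher case: regular hosts keep the "Host - scene" label, while the standalone publisher (which has no meaningful scene name) collapses to the host title alone. A quick illustration with invented values:

def context_label(host, base):
    # Mirrors the hunk above.
    label = "{host} - {scene}".format(host=host.title(), scene=base)
    if host == "standalonepublisher":
        label = host.title()
    return label

print(context_label("maya", "shot010_v001.ma"))   # Maya - shot010_v001.ma
print(context_label("standalonepublisher", ""))   # Standalonepublisher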
pype/plugins/global/publish/collect_current_pype_user.py (new file, 19 lines added)

@@ -0,0 +1,19 @@
import os
import getpass
import pyblish.api


class CollectCurrentUserPype(pyblish.api.ContextPlugin):
    """Inject the currently logged on user into the Context"""

    # Order must be after default pyblish-base CollectCurrentUser
    order = pyblish.api.CollectorOrder + 0.001
    label = "Collect Pype User"

    def process(self, context):
        user = os.getenv("PYPE_USERNAME", "").strip()
        if not user:
            return

        context.data["user"] = user
        self.log.debug("Pype user is \"{}\"".format(user))
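The + 0.001 offset is what makes the override work: by the time this plugin runs, the default pyblish-base CollectCurrentUser (referenced in the comment) has already filled context.data["user"] from the OS account, so PYPE_USERNAME replaces it only when it is actually set. A standalone sketch of that resolution order, assuming the pyblish-base collector behaves as its name suggests:

import getpass
import os

def resolve_publish_user():
    # What the default collector would have put into the context.
    user = getpass.getuser()
    # The pype-specific override collected just afterwards.
    override = os.getenv("PYPE_USERNAME", "").strip()
    return override or user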
@@ -1,3 +1,11 @@
"""
Requires:
    None

Provides:
    context -> currentFile (str)
"""

import os
import pyblish.api
@@ -1,3 +1,11 @@
"""
Requires:
    environment -> DEADLINE_PATH

Provides:
    context -> deadlineUser (str)
"""

import os
import subprocess

@@ -54,4 +62,3 @@ class CollectDeadlineUser(pyblish.api.ContextPlugin):

        self.log.info("Found Deadline user: {}".format(user))
        context.data['deadlineUser'] = user
@@ -1,3 +1,13 @@
"""
Requires:
    environment -> PYPE_PUBLISH_PATHS
    context -> workspaceDir

Provides:
    context -> user (str)
    instance -> new instance
"""

import os
import re
import copy
@@ -121,6 +131,12 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
            else:
                root = cwd

            if data.get("ftrack"):
                f = data.get("ftrack")
                os.environ["FTRACK_API_USER"] = f["FTRACK_API_USER"]
                os.environ["FTRACK_API_KEY"] = f["FTRACK_API_KEY"]
                os.environ["FTRACK_SERVER"] = f["FTRACK_SERVER"]

            metadata = data.get("metadata")
            if metadata:
                session = metadata.get("session")
@@ -1,3 +1,11 @@
"""
Requires:
    none

Provides:
    context -> machine (str)
"""

import pyblish.api
@@ -1,5 +1,11 @@
import os
import json
"""
Requires:
    config_data -> ftrack.output_representation

Provides:
    context -> output_repre_config (str)
"""

import pyblish.api
from pypeapp import config

@@ -9,7 +15,7 @@ class CollectOutputRepreConfig(pyblish.api.ContextPlugin):

    order = pyblish.api.CollectorOrder
    label = "Collect Config for representation"
    hosts = ["shell"]
    hosts = ["shell", "standalonepublisher"]

    def process(self, context):
        config_data = config.get_presets()["ftrack"]["output_representation"]
@@ -1,3 +1,12 @@
"""
Requires:
    config_data -> colorspace.default
    config_data -> dataflow.default

Provides:
    context -> presets
"""

from pyblish import api
from pypeapp import config

@@ -5,7 +14,7 @@ from pypeapp import config
class CollectPresets(api.ContextPlugin):
    """Collect Presets."""

    order = api.CollectorOrder
    order = api.CollectorOrder - 0.491
    label = "Collect Presets"

    def process(self, context):
@@ -1,8 +1,15 @@
"""
Requires:
    None

Provides:
    context -> projectData
"""

import pyblish.api
import pype.api as pype


class CollectProjectData(pyblish.api.ContextPlugin):
    """Collecting project data from avalon db"""
@@ -13,6 +13,8 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
    label = 'Collect Version'

    def process(self, context):
        if "standalonepublisher" in context.data.get("host", []):
            return

        filename = os.path.basename(context.data.get('currentFile'))
@@ -1,16 +1,87 @@
"""
Requires:
    session -> AVALON_PROJECT
    context -> anatomy (pypeapp.Anatomy)
    instance -> subset
    instance -> asset
    instance -> family

import pype.api as pype
from pypeapp import Anatomy
Provides:
    instance -> template
    instance -> assumedTemplateData
    instance -> assumedDestination
"""

import os

from avalon import io, api
import pyblish.api


class CollectTemplates(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""
class CollectTemplates(pyblish.api.InstancePlugin):
    """Fill templates with data needed for publish"""

    order = pyblish.api.CollectorOrder
    label = "Collect Templates"
    order = pyblish.api.CollectorOrder + 0.1
    label = "Collect and fill Templates"
    hosts = ["maya", "nuke", "standalonepublisher"]

    def process(self, context):
        context.data['anatomy'] = Anatomy()
        self.log.info("Anatomy templates collected...")
    def process(self, instance):
        # get all the stuff from the database
        subset_name = instance.data["subset"]
        asset_name = instance.data["asset"]
        project_name = api.Session["AVALON_PROJECT"]

        project = io.find_one({"type": "project",
                               "name": project_name},
                              projection={"config": True, "data": True})

        template = project["config"]["template"]["publish"]
        anatomy = instance.context.data['anatomy']

        asset = io.find_one({"type": "asset",
                             "name": asset_name,
                             "parent": project["_id"]})

        assert asset, ("No asset found by the name '{}' "
                       "in project '{}'".format(asset_name, project_name))
        silo = asset['silo']

        subset = io.find_one({"type": "subset",
                              "name": subset_name,
                              "parent": asset["_id"]})

        # assume there is no version yet, we start at `1`
        version = None
        version_number = 1
        if subset is not None:
            version = io.find_one({"type": "version",
                                   "parent": subset["_id"]},
                                  sort=[("name", -1)])

        # if there is a subset there ought to be version
        if version is not None:
            version_number += int(version["name"])

        hierarchy = asset['data']['parents']
        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*hierarchy)

        template_data = {"root": api.Session["AVALON_PROJECTS"],
                         "project": {"name": project_name,
                                     "code": project['data']['code']},
                         "silo": silo,
                         "family": instance.data['family'],
                         "asset": asset_name,
                         "subset": subset_name,
                         "version": version_number,
                         "hierarchy": hierarchy,
                         "representation": "TEMP"}

        instance.data["template"] = template
        instance.data["assumedTemplateData"] = template_data

        # We take the parent folder of representation 'filepath'
        instance.data["assumedDestination"] = os.path.dirname(
            (anatomy.format(template_data))["publish"]["path"]
        )
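The version handling in the rewritten CollectTemplates is worth isolating: the query with sort=[("name", -1)] returns the highest existing version document (or None), and the plugin assumes the next publish sits one above it. A minimal sketch with the io.find_one result stubbed out:

def assumed_next_version(latest_version_doc):
    # Mirrors the logic above: start at 1, bump by the latest existing name.
    version_number = 1
    if latest_version_doc is not None:
        version_number += int(latest_version_doc["name"])
    return version_number

assert assumed_next_version(None) == 1          # no subset or version yet
assert assumed_next_version({"name": 3}) == 4   # latest published is v003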
@@ -1,7 +1,6 @@
import os
import logging
import shutil
import clique

import errno
import pyblish.api
@@ -25,9 +24,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

    label = "Integrate Asset"
    order = pyblish.api.IntegratorOrder
    families = ["assembly",
                "yetiRig",
                "yeticache"]
    families = ["assembly"]
    exclude_families = ["clip"]

    def process(self, instance):
@@ -41,7 +38,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        if instance.data.get('transfer', True):
            self.integrate(instance)


    def register(self, instance):
        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
@@ -158,7 +154,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                         "version": int(version["name"]),
                         "hierarchy": hierarchy}

        template_publish = project["config"]["template"]["publish"]
        # template_publish = project["config"]["template"]["publish"]
        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
@@ -30,7 +30,8 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
                                        "resources")

        # Clean the path
        mock_destination = os.path.abspath(os.path.normpath(mock_destination)).replace("\\", "/")
        mock_destination = os.path.abspath(
            os.path.normpath(mock_destination)).replace("\\", "/")

        # Define resource destination and transfers
        resources = instance.data.get("resources", list())
@@ -38,7 +39,8 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
        for resource in resources:

            # Add destination to the resource
            source_filename = os.path.basename(resource["source"]).replace("\\", "/")
            source_filename = os.path.basename(
                resource["source"]).replace("\\", "/")
            destination = os.path.join(mock_destination, source_filename)

            # Force forward slashes to fix issue with software unable
@@ -53,7 +55,8 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
            files = resource['files']
            for fsrc in files:
                fname = os.path.basename(fsrc)
                fdest = os.path.join(mock_destination, fname).replace("\\", "/")
                fdest = os.path.join(
                    mock_destination, fname).replace("\\", "/")
                transfers.append([fsrc, fdest])

        instance.data["resources"] = resources
@@ -3,7 +3,6 @@ from os.path import getsize
import logging
import speedcopy
import clique
import traceback
import errno
import pyblish.api
from avalon import api, io
@@ -64,7 +63,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                "plate",
                "look",
                "lut",
                "audio"
                "audio",
                "yetiRig",
                "yeticache"
                ]
    exclude_families = ["clip"]
@@ -110,7 +111,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        # extracted_traceback[1], result["error"]
        # )
        # )
        # assert all(result["success"] for result in context.data["results"]), (
        # assert all(result["success"] for result in context.data["results"]),(
        # "Atomicity not held, aborting.")

        # Assemble
@@ -307,7 +308,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
            if repre.get("frameStart"):
                frame_start_padding = len(str(
                    repre.get("frameEnd")))
                index_frame_start = repre.get("frameStart")
                index_frame_start = int(repre.get("frameStart"))

            dst_padding_exp = src_padding_exp
            for i in src_collection.indexes:
@@ -322,13 +323,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                    dst_padding = dst_padding_exp % index_frame_start
                    index_frame_start += 1

                dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
                dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail).replace("..", ".")
                self.log.debug("destination: `{}`".format(dst))
                src = os.path.join(stagingdir, src_file_name)
                self.log.debug("source: {}".format(src))
                instance.data["transfers"].append([src, dst])

            repre['published_path'] = "{0}{1}{2}".format(dst_head, dst_padding_exp, dst_tail)
            repre['published_path'] = "{0}{1}{2}".format(dst_head,
                                                         dst_padding_exp,
                                                         dst_tail)
            # for imagesequence version data
            hashes = '#' * len(dst_padding)
            dst = os.path.normpath("{0}{1}{2}".format(
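The hunk above renumbers an image sequence while keeping its padding. A simplified sketch of that idea using clique (already imported by this module); it reuses the collection's own head and tail instead of the anatomy-derived dst_head/dst_tail, and the file names and new start frame are invented:

import clique

files = ["shot_main.1001.exr", "shot_main.1002.exr", "shot_main.1003.exr"]
collections, remainder = clique.assemble(files)
src_collection = collections[0]

src_padding_exp = "%0{}d".format(src_collection.padding)  # "%04d" here
index_frame_start = 51  # int(repre.get("frameStart")) in the plugin

for i in src_collection.indexes:
    src_padding = src_padding_exp % i
    dst_padding = src_padding_exp % index_frame_start
    index_frame_start += 1
    src = "{0}{1}{2}".format(src_collection.head, src_padding, src_collection.tail)
    dst = "{0}{1}{2}".format(src_collection.head, dst_padding, src_collection.tail)
    print(src, "->", dst)  # shot_main.1001.exr -> shot_main.0051.exr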
@@ -357,7 +360,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                src = os.path.join(stagingdir, fname)
                anatomy_filled = anatomy.format(template_data)
                dst = os.path.normpath(
                    anatomy_filled[template_name]["path"])
                    anatomy_filled[template_name]["path"]).replace("..", ".")

                instance.data["transfers"].append([src, dst])
@@ -440,6 +443,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        Returns:
            None
        """
        src = os.path.normpath(src)
        dst = os.path.normpath(dst)

        self.log.debug("Copying file .. {} -> {}".format(src, dst))
        dirname = os.path.dirname(dst)
@@ -1,7 +1,6 @@
import os
import json
import re
from pprint import pprint
import logging

from avalon import api, io
@@ -147,7 +146,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        "PYPE_ROOT"
    ]


    def _submit_deadline_post_job(self, instance, job):
        """
        Deadline specific code separated from :meth:`process` for sake of
@@ -192,7 +190,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        # Transfer the environment from the original job to this dependent
        # job so they use the same environment


        environment = job["Props"].get("Env", {})
        i = 0
        for index, key in enumerate(environment):
@@ -231,12 +228,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
        """
        # Get a submission job
        data = instance.data.copy()
        render_job = data.pop("deadlineSubmissionJob")
        render_job = data.pop("deadlineSubmissionJob", None)
        submission_type = "deadline"

        if not render_job:
            # No deadline job. Try Muster: musterSubmissionJob
            render_job = data.pop("musterSubmissionJob")
            render_job = data.pop("musterSubmissionJob", None)
            submission_type = "muster"
            if not render_job:
                raise RuntimeError("Can't continue without valid Deadline "
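The switch from data.pop("deadlineSubmissionJob") to data.pop(..., None) is what makes the Muster fallback reachable at all; the bare pop would raise KeyError before the second branch runs. A condensed sketch of the new control flow (dictionary contents and the error message are invented):

def resolve_render_job(data):
    # Missing keys now return None instead of raising KeyError.
    render_job = data.pop("deadlineSubmissionJob", None)
    submission_type = "deadline"
    if not render_job:
        render_job = data.pop("musterSubmissionJob", None)
        submission_type = "muster"
        if not render_job:
            raise RuntimeError("no Deadline or Muster submission job found")
    return render_job, submission_type

print(resolve_render_job({"musterSubmissionJob": {"id": 42}}))  # ({'id': 42}, 'muster')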
@@ -295,11 +292,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            # Optional metadata (for debugging)
            "metadata": {
                "instance": data,
                "job": job,
                "job": render_job,
                "session": api.Session.copy()
            }
        }

        if submission_type == "muster":
            ftrack = {
                "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
                "FTRACK_API_KEY": os.environ.get("FTRACK_API_KEY"),
                "FTRACK_SERVER": os.environ.get("FTRACK_SERVER")
            }
            metadata.update({"ftrack": ftrack})

        # Ensure output dir exists
        output_dir = instance.data["outputDir"]
        if not os.path.isdir(output_dir):
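The metadata block above pairs with the CollectRenderedFrames hunk earlier in this commit: the post job carries the ftrack credentials only for Muster submissions, and the collector restores them into the environment on the farm. A sketch of that consumer side with an illustrative payload (all values invented):

import os

data = {
    "ftrack": {
        "FTRACK_API_USER": "publisher@studio.tld",
        "FTRACK_API_KEY": "xxxx-xxxx-xxxx",
        "FTRACK_SERVER": "https://studio.ftrackapp.com"
    },
    "metadata": {"session": {"AVALON_PROJECT": "demo_project"}}
}

if data.get("ftrack"):
    f = data.get("ftrack")
    os.environ["FTRACK_API_USER"] = f["FTRACK_API_USER"]
    os.environ["FTRACK_API_KEY"] = f["FTRACK_API_KEY"]
    os.environ["FTRACK_SERVER"] = f["FTRACK_SERVER"]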