Merged in feature/PYPE-666_anatomy_context_data (pull request #474)

Feature/PYPE-666 anatomy context data

Approved-by: Milan Kolar <milan@orbi.tools>
This commit is contained in:
Jakub Trllo 2020-02-17 13:58:50 +00:00 committed by Milan Kolar
commit 5cd09a84eb
19 changed files with 525 additions and 1533 deletions

View file

@ -1,10 +1,24 @@
"""
"""Collect Anatomy and global anatomy data.
Requires:
None
session -> AVALON_TASK
projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder)
username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001)
datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder)
Optional:
comment -> collect_comment *(pyblish.api.CollectorOrder)
intent -> collected in pyblish-lite
Provides:
context -> anatomy (pypeapp.Anatomy)
context -> anatomyData
"""
import os
import json
from avalon import io, api, lib
from pypeapp import Anatomy
import pyblish.api
@ -12,9 +26,52 @@ import pyblish.api
class CollectAnatomy(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
order = pyblish.api.CollectorOrder
order = pyblish.api.CollectorOrder + 0.002
label = "Collect Anatomy"
def process(self, context):
context.data['anatomy'] = Anatomy()
self.log.info("Anatomy templates collected...")
root_path = api.registered_root()
task_name = api.Session["AVALON_TASK"]
project_entity = context.data["projectEntity"]
asset_entity = context.data["assetEntity"]
project_name = project_entity["name"]
context.data["anatomy"] = Anatomy(project_name)
self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)
hierarchy_items = asset_entity["data"]["parents"]
hierarchy = ""
if hierarchy_items:
hierarchy = os.path.join(*hierarchy_items)
context_data = {
"root": root_path,
"project": {
"name": project_name,
"code": project_entity["data"].get("code")
},
"asset": asset_entity["name"],
"hierarchy": hierarchy.replace("\\", "/"),
"task": task_name,
"username": context.data["user"]
}
avalon_app_name = os.environ.get("AVALON_APP_NAME")
if avalon_app_name:
application_def = lib.get_application(avalon_app_name)
app_dir = application_def.get("application_dir")
if app_dir:
context_data["app"] = app_dir
datetime_data = context.data.get("datetimeData") or {}
context_data.update(datetime_data)
context.data["anatomyData"] = context_data
self.log.info("Global anatomy Data collected")
self.log.debug(json.dumps(context_data, indent=4))

View file

@ -0,0 +1,46 @@
"""Collect Anatomy and global anatomy data.
Requires:
session -> AVALON_PROJECT, AVALON_ASSET
Provides:
context -> projectEntity - project entity from database
context -> assetEntity - asset entity from database
"""
from avalon import io, api
import pyblish.api
class CollectAvalonEntities(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
order = pyblish.api.CollectorOrder
label = "Collect Avalon Entities"
def process(self, context):
project_name = api.Session["AVALON_PROJECT"]
asset_name = api.Session["AVALON_ASSET"]
project_entity = io.find_one({
"type": "project",
"name": project_name
})
assert project_entity, (
"Project '{0}' was not found."
).format(project_name)
self.log.debug("Collected Project entity \"{}\"".format(project_entity))
asset_entity = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
assert asset_entity, (
"No asset found by the name '{0}' in project '{1}'"
).format(asset_name, project_name)
self.log.debug("Collected Asset entity \"{}\"".format(asset_entity))
context.data["projectEntity"] = project_entity
context.data["assetEntity"] = asset_entity

View file

@ -0,0 +1,124 @@
"""
Requires:
context -> anatomyData
context -> projectEntity
context -> assetEntity
instance -> asset
instance -> subset
instance -> family
Optional:
instance -> version
instance -> resolutionWidth
instance -> resolutionHeight
instance -> fps
Provides:
instance -> projectEntity
instance -> assetEntity
instance -> anatomyData
instance -> version
instance -> latestVersion
"""
import copy
import json
from avalon import io
import pyblish.api
class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
"""Fill templates with data needed for publish"""
order = pyblish.api.CollectorOrder + 0.49
label = "Collect instance anatomy data"
hosts = ["maya", "nuke", "standalonepublisher"]
def process(self, instance):
# get all the stuff from the database
anatomy_data = copy.deepcopy(instance.context.data["anatomyData"])
project_entity = instance.context.data["projectEntity"]
context_asset_entity = instance.context.data["assetEntity"]
asset_name = instance.data["asset"]
# Check if asset name is the same as what is in context
# - they may be different, e.g. in NukeStudio
if context_asset_entity["name"] == asset_name:
asset_entity = context_asset_entity
else:
asset_entity = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
subset_name = instance.data["subset"]
version_number = instance.data.get("version")
latest_version = None
if asset_entity:
subset_entity = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_entity["_id"]
})
if subset_entity is None:
self.log.debug("Subset entity does not exist yet.")
else:
version_entity = io.find_one(
{
"type": "version",
"parent": subset_entity["_id"]
},
sort=[("name", -1)]
)
if version_entity:
latest_version = version_entity["name"]
# If version is not specified for instance or context
if version_number is None:
# TODO we should be able to change default version by studio
# preferences (like start with version number `0`)
version_number = 1
# use latest version (+1) if already any exist
if latest_version is not None:
version_number += int(latest_version)
anatomy_updates = {
"asset": asset_name,
"family": instance.data["family"],
"subset": subset_name,
"version": version_number
}
task_name = instance.data.get("task")
if task_name:
anatomy_updates["task"] = task_name
# Version should not be collected since may be instance
anatomy_data.update(anatomy_updates)
resolution_width = instance.data.get("resolutionWidth")
if resolution_width:
anatomy_data["resolution_width"] = resolution_width
resolution_height = instance.data.get("resolutionHeight")
if resolution_height:
anatomy_data["resolution_height"] = resolution_height
fps = instance.data.get("fps")
if resolution_height:
anatomy_data["fps"] = fps
instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity
instance.data["anatomyData"] = anatomy_data
instance.data["latestVersion"] = latest_version
# TODO should be version number set here?
instance.data["version"] = version_number
self.log.info("Instance anatomy Data collected")
self.log.debug(json.dumps(anatomy_data, indent=4))

View file

@ -1,24 +0,0 @@
"""
Requires:
None
Provides:
context -> projectData
"""
import pyblish.api
import pype.api as pype
class CollectProjectData(pyblish.api.ContextPlugin):
"""Collecting project data from avalon db"""
label = "Collect Project Data"
order = pyblish.api.CollectorOrder - 0.1
hosts = ["nukestudio"]
def process(self, context):
# get project data from avalon db
context.data["projectData"] = pype.get_project()["data"]
return

View file

@ -0,0 +1,60 @@
"""
Requires:
context -> anatomy
context -> anatomyData
Provides:
instance -> publishDir
instance -> resourcesDir
"""
import os
import copy
import pyblish.api
from avalon import api
class CollectResourcesPath(pyblish.api.InstancePlugin):
"""Generate directory path where the files and resources will be stored"""
label = "Collect Resources Path"
order = pyblish.api.CollectorOrder + 0.495
def process(self, instance):
anatomy = instance.context.data["anatomy"]
template_data = copy.deepcopy(instance.data["anatomyData"])
# This is for cases of Deprecated anatomy without `folder`
# TODO remove when all clients have solved this issue
template_data.update({
"frame": "FRAME_TEMP",
"representation": "TEMP"
})
anatomy_filled = anatomy.format(template_data)
if "folder" in anatomy.templates["publish"]:
publish_folder = anatomy_filled["publish"]["folder"]
else:
# solve deprecated situation when `folder` key is not underneath
# `publish` anatomy
project_name = api.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have set `folder`"
" key underneath `publish` (in global of for project `{}`)."
).format(project_name))
file_path = anatomy_filled["publish"]["path"]
# Directory
publish_folder = os.path.dirname(file_path)
publish_folder = os.path.normpath(publish_folder)
resources_folder = os.path.join(publish_folder, "resources")
instance.data["publishDir"] = publish_folder
instance.data["resourcesDir"] = resources_folder
self.log.debug("publishDir: \"{}\"".format(publish_folder))
self.log.debug("resourcesDir: \"{}\"".format(resources_folder))

View file

@ -1,119 +0,0 @@
"""
Requires:
session -> AVALON_PROJECT
context -> anatomy (pypeapp.Anatomy)
instance -> subset
instance -> asset
instance -> family
Provides:
instance -> template
instance -> assumedTemplateData
instance -> assumedDestination
"""
import os
from avalon import io, api
import pyblish.api
class CollectTemplates(pyblish.api.InstancePlugin):
"""Fill templates with data needed for publish"""
order = pyblish.api.CollectorOrder + 0.1
label = "Collect and fill Templates"
hosts = ["maya", "nuke", "standalonepublisher"]
def process(self, instance):
# get all the stuff from the database
subset_name = instance.data["subset"]
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one(
{
"type": "project",
"name": project_name
},
projection={"config": True, "data": True}
)
template = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
# if there is a subset there ought to be version
if version is not None:
version_number += int(version["name"])
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
else:
hierarchy = ""
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy.replace("\\", "/"),
"representation": "TEMP"}
# Add datetime data to template data
datetime_data = instance.context.data.get("datetimeData") or {}
template_data.update(datetime_data)
resolution_width = instance.data.get("resolutionWidth")
resolution_height = instance.data.get("resolutionHeight")
fps = instance.data.get("fps")
if resolution_width:
template_data["resolution_width"] = resolution_width
if resolution_width:
template_data["resolution_height"] = resolution_height
if resolution_width:
template_data["fps"] = fps
instance.data["template"] = template
instance.data["assumedTemplateData"] = template_data
# We take the parent folder of representation 'filepath'
instance.data["assumedDestination"] = os.path.dirname(
(anatomy.format(template_data))["publish"]["path"]
)
self.log.info("Assumed Destination has been created...")
self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"]))
self.log.debug("__ template: `{}`".format(instance.data["template"]))

View file

@ -32,21 +32,15 @@ class ExtractBurnin(pype.api.Extractor):
frame_end = int(instance.data.get("frameEnd") or 1)
duration = frame_end - frame_start + 1
prep_data = {
"username": instance.context.data['user'],
"asset": os.environ['AVALON_ASSET'],
"task": os.environ['AVALON_TASK'],
prep_data = copy.deepcopy(instance.data["anatomyData"])
prep_data.update({
"frame_start": frame_start,
"frame_end": frame_end,
"duration": duration,
"version": int(version),
"comment": instance.context.data.get("comment", ""),
"intent": instance.context.data.get("intent", "")
}
# Add datetime data to preparation data
datetime_data = instance.context.data.get("datetimeData") or {}
prep_data.update(datetime_data)
})
slate_frame_start = frame_start
slate_frame_end = frame_end
@ -64,10 +58,6 @@ class ExtractBurnin(pype.api.Extractor):
"slate_duration": slate_duration
})
# Update data with template data
template_data = instance.data.get("assumedTemplateData") or {}
prep_data.update(template_data)
# get anatomy project
anatomy = instance.context.data['anatomy']

View file

@ -1,417 +0,0 @@
import os
import logging
import shutil
import errno
import pyblish.api
from avalon import api, io
from avalon.vendor import filelink
log = logging.getLogger(__name__)
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
the published file.
The order of families is important, when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. Same goes for file dependent assets.
"""
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = []
exclude_families = ["clip"]
def process(self, instance):
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return
self.register(instance)
self.log.info("Integrating Asset in to the database ...")
if instance.data.get('transfer', True):
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
LOCATION = api.Session["AVALON_LOCATION"]
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, ("Incomplete instance \"%s\": "
"Missing reference to staging area." % instance)
# extra check if stagingDir actually exists and is available
self.log.debug("Establishing staging directory @ %s" % stagingdir)
# Ensure at least one file is set up for transfer in staging dir.
files = instance.data.get("files", [])
assert files, "Instance has no files to transfer"
assert isinstance(files, (list, tuple)), (
"Instance 'files' must be a list, got: {0}".format(files)
)
project = io.find_one({"type": "project"})
asset = io.find_one({
"type": "asset",
"name": ASSET,
"parent": project["_id"]
})
assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)
subset = self.get_subset(asset, instance)
# get next version
latest_version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
{"name": True},
sort=[("name", -1)]
)
next_version = 1
if latest_version is not None:
next_version += latest_version["name"]
self.log.info("Verifying version from assumed destination")
assumed_data = instance.data["assumedTemplateData"]
assumed_version = assumed_data["version"]
if assumed_version != next_version:
raise AttributeError("Assumed version 'v{0:03d}' does not match"
"next version in database "
"('v{1:03d}')".format(assumed_version,
next_version))
self.log.debug("Next version: v{0:03d}".format(next_version))
version_data = self.create_version_data(context, instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)
self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
root = api.registered_root()
hierarchy = ""
parents = io.find_one({
"type": 'asset',
"name": ASSET
})['data']['parents']
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents)
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": int(version["name"]),
"hierarchy": hierarchy}
# template_publish = project["config"]["template"]["publish"]
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
if 'transfers' not in instance.data:
instance.data['transfers'] = []
for files in instance.data["files"]:
# Collection
# _______
# |______|\
# | |\|
# | ||
# | ||
# | ||
# |_______|
#
if isinstance(files, list):
collection = files
# Assert that each member has identical suffix
_, ext = os.path.splitext(collection[0])
assert all(ext == os.path.splitext(name)[1]
for name in collection), (
"Files had varying suffixes, this is a bug"
)
assert not any(os.path.isabs(name) for name in collection)
template_data["representation"] = ext[1:]
for fname in collection:
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled["publish"]["path"]
instance.data["transfers"].append([src, dst])
template = anatomy.templates["publish"]["path"]
else:
# Single file
# _______
# | |\
# | |
# | |
# | |
# |_______|
#
fname = files
assert not os.path.isabs(fname), (
"Given file name is a full path"
)
_, ext = os.path.splitext(fname)
template_data["representation"] = ext[1:]
src = os.path.join(stagingdir, fname)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled["publish"]["path"]
instance.data["transfers"].append([src, dst])
template = anatomy.templates["publish"]["path"]
representation = {
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {'path': dst, 'template': template},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
# for performance reasons.
"context": {
"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": version["name"],
"hierarchy": hierarchy,
"representation": ext[1:]
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
def integrate(self, instance):
"""Move the files
Through `instance.data["transfers"]`
Args:
instance: the instance to integrate
"""
transfers = instance.data.get("transfers", list())
for src, dest in transfers:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
# Produce hardlinked copies
# Note: hardlink can only be produced between two files on the same
# server/disk and editing one of the two will edit both files at once.
# As such it is recommended to only make hardlinks between static files
# to ensure publishes remain safe and non-edited.
hardlinks = instance.data.get("hardlinks", list())
for src, dest in hardlinks:
self.log.info("Hardlinking file .. {} -> {}".format(src, dest))
self.hardlink_file(src, dest)
def copy_file(self, src, dst):
""" Copy given source to destination
Arguments:
src (str): the source file which needs to be copied
dst (str): the destination of the sourc file
Returns:
None
"""
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
shutil.copy(src, dst)
def hardlink_file(self, src, dst):
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
filelink.create(src, dst, filelink.HARDLINK)
def get_subset(self, asset, instance):
subset = io.find_one({
"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]
})
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"type": "subset",
"name": subset_name,
"data": {},
"parent": asset["_id"]
}).inserted_id
subset = io.find_one({"_id": _id})
return subset
def create_version(self, subset, version_number, locations, data=None):
""" Copy given source to destination
Args:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
Returns:
dict: collection of data to create a version
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]
return {"schema": "avalon-core:version-2.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}
def create_version_data(self, context, instance):
"""Create the data collection for the version
Args:
context: the current context
instance: the current instance being published
Returns:
dict: the required information with instance.data as key
"""
families = []
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
if instance_family is not None:
families.append(instance_family)
families += current_families
self.log.debug("Registered root: {}".format(api.registered_root()))
# create relative source path for DB
try:
source = instance.data['source']
except KeyError:
source = context.data["currentFile"]
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
self.log.debug("Source: {}".format(source))
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get("fps")}
# Include optional data if present in
optionals = [
"frameStart", "frameEnd", "step", "handles", "sourceHashes"
]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data[key]
return version_data

View file

@ -1,147 +0,0 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile", "plate"]
def process(self, instance):
anatomy = instance.context.data['anatomy']
self.create_destination_template(instance, anatomy)
template_data = instance.data["assumedTemplateData"]
# self.log.info(anatomy.templates)
anatomy_filled = anatomy.format(template_data)
# self.log.info(anatomy_filled)
mock_template = anatomy_filled["publish"]["path"]
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(
os.path.normpath(mock_destination)).replace("\\", "/")
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(
resource["source"]).replace("\\", "/")
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(
mock_destination, fname).replace("\\", "/")
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance, anatomy):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
a_template = anatomy.templates
project = io.find_one(
{"type": "project", "name": project_name},
projection={"config": True, "data": True}
)
template = a_template['publish']['path']
# anatomy = instance.context.data['anatomy']
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
# if there is a subset there ought to be version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
padding = int(a_template['render']['padding'])
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = "/".join(hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"frame": ('#' * padding),
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template

View file

@ -2,6 +2,7 @@ import os
from os.path import getsize
import logging
import sys
import copy
import clique
import errno
import pyblish.api
@ -100,144 +101,104 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
TASK = instance.data.get("task") or api.Session["AVALON_TASK"]
LOCATION = api.Session["AVALON_LOCATION"]
anatomy_data = instance.data["anatomyData"]
io.install()
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
# for result in context.data["results"]:
# if not result["success"]:
# self.log.debug(result)
# exc_type, exc_value, exc_traceback = result["error_info"]
# extracted_traceback = traceback.extract_tb(exc_traceback)[-1]
# self.log.debug(
# "Error at line {}: \"{}\"".format(
# extracted_traceback[1], result["error"]
# )
# )
# assert all(result["success"] for result in context.data["results"]),(
# "Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
project_entity = instance.data["projectEntity"]
asset_name = instance.data["asset"]
asset_entity = instance.data.get("assetEntity")
if not asset_entity:
asset_entity = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
assert asset_entity, (
"No asset found by the name \"{0}\" in project \"{1}\""
).format(asset_name, project_entity["name"])
instance.data["assetEntity"] = asset_entity
# update anatomy data with asset specific keys
# - name should already been set
hierarchy = ""
parents = asset_entity["data"]["parents"]
if parents:
hierarchy = "/".join(parents)
anatomy_data["hierarchy"] = hierarchy
task_name = instance.data.get("task")
if task_name:
anatomy_data["task"] = task_name
stagingdir = instance.data.get("stagingDir")
if not stagingdir:
self.log.info('''{} is missing reference to staging
directory Will try to get it from
representation'''.format(instance))
self.log.info((
"{0} is missing reference to staging directory."
" Will try to get it from representation."
).format(instance))
# extra check if stagingDir actually exists and is available
self.log.debug("Establishing staging directory @ %s" % stagingdir)
else:
self.log.debug(
"Establishing staging directory @ {0}".format(stagingdir)
)
# Ensure at least one file is set up for transfer in staging dir.
repres = instance.data.get("representations", None)
repres = instance.data.get("representations")
assert repres, "Instance has no files to transfer"
assert isinstance(repres, (list, tuple)), (
"Instance 'files' must be a list, got: {0}".format(repres)
"Instance 'files' must be a list, got: {0} {1}".format(
str(type(repres)), str(repres)
)
)
# FIXME: io is not initialized at this point for shell host
io.install()
project = io.find_one({"type": "project"})
subset = self.get_subset(asset_entity, instance)
asset = io.find_one({
"type": "asset",
"name": ASSET,
"parent": project["_id"]
})
assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)
subset = self.get_subset(asset, instance)
# get next version
latest_version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
{"name": True},
sort=[("name", -1)]
)
next_version = 1
if latest_version is not None:
next_version += latest_version["name"]
if instance.data.get('version'):
next_version = int(instance.data.get('version'))
self.log.debug("Next version: v{0:03d}".format(next_version))
version_number = instance.data["version"]
self.log.debug("Next version: v{}".format(version_number))
version_data = self.create_version_data(context, instance)
version_data_instance = instance.data.get('versionData')
if version_data_instance:
version_data.update(version_data_instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)
# TODO rename method from `create_version` to
# `prepare_version` or similar...
version = self.create_version(
subset=subset,
version_number=version_number,
data=version_data
)
self.log.debug("Creating version ...")
existing_version = io.find_one({
'type': 'version',
'parent': subset["_id"],
'name': next_version
'name': version_number
})
if existing_version is None:
version_id = io.insert_one(version).inserted_id
else:
# TODO query by _id and
# remove old version and representations but keep their ids
io.update_many({
'type': 'version',
'parent': subset["_id"],
'name': next_version
'name': version_number
}, {'$set': version}
)
version_id = existing_version['_id']
instance.data['version'] = version['name']
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
root = api.registered_root()
hierarchy = ""
parents = io.find_one({
"type": 'asset',
"name": ASSET
})['data']['parents']
if parents and len(parents) > 0:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents)
intent = context.data.get("intent")
if intent is not None:
anatomy_data["intent"] = intent
anatomy = instance.context.data['anatomy']
@ -250,31 +211,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance.data['transfers'] = []
for idx, repre in enumerate(instance.data["representations"]):
# Collection
# _______
# |______|\
# | |\|
# | ||
# | ||
# | ||
# |_______|
#
# create template data for Anatomy
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset.get('silo'),
"task": TASK,
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": int(version["name"]),
"hierarchy": hierarchy}
# Add datetime data to template data
datetime_data = context.data.get("datetimeData") or {}
template_data.update(datetime_data)
template_data = copy.deepcopy(anatomy_data)
if intent is not None:
template_data["intent"] = intent
resolution_width = repre.get("resolutionWidth")
resolution_height = repre.get("resolutionHeight")
@ -292,6 +232,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
stagingdir = repre['stagingDir']
if repre.get('anatomy_template'):
template_name = repre['anatomy_template']
template = os.path.normpath(
anatomy.templates[template_name]["path"])
@ -322,7 +263,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
template_filled = anatomy_filled[template_name]["path"]
if repre_context is None:
repre_context = template_filled.used_values
test_dest_files.append(
os.path.normpath(template_filled)
)
@ -342,12 +282,15 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
index_frame_start = int(repre.get("frameStart"))
# exception for slate workflow
if "slate" in instance.data["families"]:
if index_frame_start and "slate" in instance.data["families"]:
index_frame_start -= 1
dst_padding_exp = src_padding_exp
dst_start_frame = None
for i in src_collection.indexes:
# TODO 1.) do not count padding in each index iteration
# 2.) do not count dst_padding from src_padding before
# index_frame_start check
src_padding = src_padding_exp % i
src_file_name = "{0}{1}{2}".format(
@ -375,7 +318,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if not dst_start_frame:
dst_start_frame = dst_padding
dst = "{0}{1}{2}".format(
dst_head,
dst_start_frame,
@ -547,14 +489,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
filelink.create(src, dst, filelink.HARDLINK)
def get_subset(self, asset, instance):
subset_name = instance.data["subset"]
subset = io.find_one({
"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]
"name": subset_name
})
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
self.log.debug("families. %s" % instance.data.get('families'))
self.log.debug(
@ -583,26 +525,21 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
return subset
def create_version(self, subset, version_number, locations, data=None):
def create_version(self, subset, version_number, data=None):
""" Copy given source to destination
Args:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
Returns:
dict: collection of data to create a version
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]
return {"schema": "pype:version-3.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}
def create_version_data(self, context, instance):
@ -645,6 +582,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"fps": context.data.get(
"fps", instance.data.get("fps"))}
intent = context.data.get("intent")
if intent is not None:
version_data["intent"] = intent
# Include optional data if present in
optionals = [
"frameStart", "frameEnd", "step", "handles",

View file

@ -1,423 +0,0 @@
import os
import logging
import shutil
import clique
import errno
import pyblish.api
from avalon import api, io
log = logging.getLogger(__name__)
class IntegrateFrames(pyblish.api.InstancePlugin):
    """Resolve any dependency issues

    This plug-in resolves any paths which, if not updated might break
    the published file.

    The order of families is important, when working with lookdev you want to
    first publish the texture, update the texture paths in the nodes and then
    publish the shading network. Same goes for file dependent assets.
    """

    label = "Integrate Frames"
    order = pyblish.api.IntegratorOrder
    families = ["imagesequence"]
    # Substrings matched against each instance family to opt an instance in.
    family_targets = [".frames", ".local", ".review", "imagesequence", "render", "source"]
    # Families that are never processed by this plugin.
    exclude_families = ["clip"]

    def process(self, instance):
        """Register the instance in the database and transfer its files.

        Skips the instance when its family is excluded or when none of its
        families matches `family_targets`.
        """
        # NOTE(review): this is a substring test (`family in "clip"`), not an
        # equality test — confirm that is intended.
        if [ef for ef in self.exclude_families
                if instance.data["family"] in ef]:
            return
        # Keep the instance only when at least one of its families contains
        # one of the configured target substrings.
        families = [f for f in instance.data["families"]
                    for search in self.family_targets
                    if search in f]
        if not families:
            return
        self.register(instance)

        # self.log.info("Integrating Asset in to the database ...")
        # self.log.info("instance.data: {}".format(instance.data))
        # `transfer` can be disabled per-instance to only register documents.
        if instance.data.get('transfer', True):
            self.integrate(instance)

    def register(self, instance):
        """Create subset/version/representation documents and queue transfers.

        Reads the staging directory and the collected anatomy, computes the
        destination path for every staged file and appends
        ``[src, dst]`` pairs to ``instance.data["transfers"]``.

        Raises:
            AssertionError: when publishing preconditions are not met
                (failed context results, missing staging dir, unknown
                project/asset, absolute single-file names).
            AttributeError: when the assumed version does not match the
                next version found in the database.
        """
        # Required environment variables
        PROJECT = api.Session["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
        LOCATION = api.Session["AVALON_LOCATION"]

        context = instance.context

        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        # Abort when any earlier plugin in this publish reported failure.
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        # extra check if stagingDir actually exists and is available
        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one({"type": "project"})
        asset = io.find_one({
            "type": "asset",
            "name": ASSET,
            "parent": project["_id"]
        })
        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one(
            {
                "type": "version",
                "parent": subset["_id"]
            },
            {"name": True},
            sort=[("name", -1)]
        )

        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.info("Verifying version from assumed destination")

        # The assumed version was computed earlier (collection/extraction);
        # it must agree with what the database says is next.
        assumed_data = instance.data["assumedTemplateData"]
        assumed_version = assumed_data["version"]
        if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match"
                                 "next version in database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))

        # An explicit version on the instance overrides the computed one.
        if instance.data.get('version'):
            next_version = int(instance.data.get('version'))

        self.log.debug("Next version: v{0:03d}".format(next_version))

        version_data = self.create_version_data(context, instance)
        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        hierarchy = ""
        parents = io.find_one({"type": 'asset', "name": ASSET})[
            'data']['parents']
        if parents and len(parents) > 0:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*parents)

        # Data used to fill the anatomy "render" template below.
        template_data = {"root": root,
                         "project": {"name": PROJECT,
                                     "code": project['data']['code']},
                         "silo": asset.get('silo'),
                         "task": api.Session["AVALON_TASK"],
                         "asset": ASSET,
                         "family": instance.data['family'],
                         "subset": subset["name"],
                         "version": int(version["name"]),
                         "hierarchy": hierarchy}

        # template_publish = project["config"]["template"]["publish"]
        anatomy = instance.context.data['anatomy']

        # Find the representations to transfer amongst the files
        # Each should be a single representation (as such, a single extension)
        representations = []
        destination_list = []
        if 'transfers' not in instance.data:
            instance.data['transfers'] = []

        for files in instance.data["files"]:
            # Collection
            #   _______
            #  |______|\
            # |      |\|
            # |      ||
            # |      ||
            # |      ||
            # |______|
            #
            if isinstance(files, list):
                # A list of files is treated as one frame sequence.
                src_collections, remainder = clique.assemble(files)
                src_collection = src_collections[0]

                # Assert that each member has identical suffix
                src_head = src_collection.format("{head}")
                src_tail = ext = src_collection.format("{tail}")

                # Fill the template with two sample frame numbers so the
                # destination can itself be assembled into a collection.
                test_dest_files = list()
                for i in [1, 2]:
                    template_data["representation"] = src_tail[1:]
                    template_data["frame"] = src_collection.format(
                        "{padding}") % i
                    anatomy_filled = anatomy.format(template_data)
                    test_dest_files.append(anatomy_filled["render"]["path"])

                dst_collections, remainder = clique.assemble(test_dest_files)
                dst_collection = dst_collections[0]
                dst_head = dst_collection.format("{head}")
                dst_tail = dst_collection.format("{tail}")

                # Queue one transfer per source frame.
                for i in src_collection.indexes:
                    src_padding = src_collection.format("{padding}") % i
                    src_file_name = "{0}{1}{2}".format(
                        src_head, src_padding, src_tail)
                    dst_padding = dst_collection.format("{padding}") % i
                    dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
                    src = os.path.join(stagingdir, src_file_name)
                    instance.data["transfers"].append([src, dst])
            else:
                # Single file
                #  _______
                # |      |\
                # |       |
                # |       |
                # |       |
                # |_______|
                #
                template_data.pop("frame", None)
                fname = files
                self.log.info("fname: {}".format(fname))
                assert not os.path.isabs(fname), (
                    "Given file name is a full path"
                )
                _, ext = os.path.splitext(fname)
                template_data["representation"] = ext[1:]
                src = os.path.join(stagingdir, fname)
                anatomy_filled = anatomy.format(template_data)
                dst = anatomy_filled["render"]["path"]
                instance.data["transfers"].append([src, dst])

            # For frame-based representations store a hash-padded path so the
            # stored template path stands for the whole sequence.
            if ext[1:] not in ["jpeg", "jpg", "mov", "mp4", "wav"]:
                template_data["frame"] = "#" * int(anatomy_filled["render"]["padding"])
                anatomy_filled = anatomy.format(template_data)

            path_to_save = anatomy_filled["render"]["path"]
            template = anatomy.templates["render"]["path"]
            self.log.debug("path_to_save: {}".format(path_to_save))

            representation = {
                "schema": "pype:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
                "data": {'path': path_to_save, 'template': template},
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "root": root,
                    "project": {
                        "name": PROJECT,
                        "code": project['data']['code']
                    },
                    "task": api.Session["AVALON_TASK"],
                    "silo": asset['silo'],
                    "asset": ASSET,
                    "family": instance.data['family'],
                    "subset": subset["name"],
                    "version": int(version["name"]),
                    "hierarchy": hierarchy,
                    "representation": ext[1:]
                }
            }

            destination_list.append(dst)
            instance.data['destination_list'] = destination_list
            representations.append(representation)

        self.log.info("Registering {} items".format(len(representations)))
        io.insert_many(representations)

    def integrate(self, instance):
        """Move the files

        Through `instance.data["transfers"]`

        Args:
            instance: the instance to integrate
        """
        transfers = instance.data["transfers"]

        for src, dest in transfers:
            src = os.path.normpath(src)
            dest = os.path.normpath(dest)
            # NOTE(review): substring check — presumably meant to skip
            # transfers whose source already lives at the destination;
            # confirm `src == dest` was not intended.
            if src in dest:
                continue
            self.log.info("Copying file .. {} -> {}".format(src, dest))
            self.copy_file(src, dest)

    def copy_file(self, src, dst):
        """ Copy given source to destination

        Creates the destination directory when it does not exist yet.

        Arguments:
            src (str): the source file which needs to be copied
            dst (str): the destination of the sourc file

        Returns:
            None
        """
        dirname = os.path.dirname(dst)
        try:
            os.makedirs(dirname)
        except OSError as e:
            # Already-existing directory is fine; anything else is fatal.
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                raise

        shutil.copy(src, dst)

    def get_subset(self, asset, instance):
        """Return the subset document for the instance, creating it if absent.

        Args:
            asset (dict): the asset document the subset belongs to
            instance: the instance being published

        Returns:
            dict: the existing or newly created subset document
        """
        subset = io.find_one({
            "type": "subset",
            "parent": asset["_id"],
            "name": instance.data["subset"]
        })

        if subset is None:
            subset_name = instance.data["subset"]
            self.log.info("Subset '%s' not found, creating.." % subset_name)

            _id = io.insert_one({
                "schema": "pype:subset-2.0",
                "type": "subset",
                "name": subset_name,
                "data": {},
                "parent": asset["_id"]
            }).inserted_id

            subset = io.find_one({"_id": _id})

        return subset

    def create_version(self, subset, version_number, locations, data=None):
        """Assemble the version document to be inserted into the database.

        Args:
            subset (dict): the registered subset of the asset
            version_number (int): the version number
            locations (list): the currently registered locations

        Returns:
            dict: collection of data to create a version
        """
        # Imprint currently registered location
        version_locations = [location for location in locations if
                             location is not None]

        return {"schema": "pype:version-2.0",
                "type": "version",
                "parent": subset["_id"],
                "name": version_number,
                "locations": version_locations,
                "data": data}

    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """
        families = []
        current_families = instance.data.get("families", list())
        instance_family = instance.data.get("family", None)

        if instance_family is not None:
            families.append(instance_family)
        families += current_families

        try:
            source = instance.data['source']
        except KeyError:
            # Fall back to the workfile, stored relative to the project
            # root as a "{root}"-templated, forward-slashed path.
            source = context.data["currentFile"]
            relative_path = os.path.relpath(source, api.registered_root())
            source = os.path.join("{root}", relative_path).replace("\\", "/")

        version_data = {"families": families,
                        "time": context.data["time"],
                        "author": context.data["user"],
                        "source": source,
                        "comment": context.data.get("comment")}

        # Include optional data if present in
        optionals = ["frameStart", "frameEnd", "step",
                     "handles", "colorspace", "fps", "outputDir"]
        for key in optionals:
            if key in instance.data:
                version_data[key] = instance.data.get(key, None)

        return version_data

View file

@ -0,0 +1,49 @@
import os
import pyblish.api
class IntegrateResourcesPath(pyblish.api.InstancePlugin):
    """Fill in destination paths for an instance's resources and transfers.

    Every resource gets a ``destination`` inside the instance's
    ``resourcesDir`` and one transfer pair is queued for each of the
    resource's individual files. Instances without resources and without
    transfers are left untouched.
    """

    label = "Integrate Resources Path"
    order = pyblish.api.IntegratorOrder - 0.05
    families = ["clip", "projectfile", "plate"]

    def process(self, instance):
        data = instance.data
        resources = data.get("resources") or []
        transfers = data.get("transfers") or []

        # Nothing to do when the instance carries neither key.
        if not (resources or transfers):
            self.log.debug(
                "Instance does not have `resources` and `transfers`"
            )
            return

        dest_root = data["resourcesDir"]

        for resource in resources:
            # Destination of the resource itself. Forward slashes are
            # forced to avoid software tripping over backslashes
            # (e.g. escape characters in PLN-151 V-Ray UDIM).
            base_name = os.path.basename(
                resource["source"]).replace("\\", "/")
            resource["destination"] = os.path.join(
                dest_root, base_name
            ).replace("\\", "/")

            # Queue one transfer per individual file of the resource,
            # e.g. all files of a cache or UDIM textures.
            for src_file in resource["files"]:
                dst_file = os.path.join(
                    dest_root, os.path.basename(src_file)
                ).replace("\\", "/")
                transfers.append([src_file, dst_file])

        data["resources"] = resources
        data["transfers"] = transfers

View file

@ -1,43 +0,0 @@
import pyblish.api
import os
class ValidateTemplates(pyblish.api.ContextPlugin):
    """Verify that the collected anatomy can fill its templates.

    Runs two hard-coded sample data sets through the context's anatomy
    object and logs the formatted results. Fails when no anatomy was
    collected into the context.
    """

    label = "Validate Templates"
    order = pyblish.api.ValidatorOrder - 0.1
    hosts = ["maya", "houdini", "nuke"]

    def process(self, context):
        anatomy = context.data["anatomy"]
        if not anatomy:
            raise RuntimeError("Did not find anatomy")

        projects_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]

        # First sample: shot-style context data.
        shot_sample = {
            "root": projects_root,
            "project": {"name": "D001_projectsx",
                        "code": "prjX"},
            "ext": "exr",
            "version": 3,
            "task": "animation",
            "asset": "sh001",
            "app": "maya",
            "hierarchy": "ep101/sq01/sh010"
        }
        self.log.info(anatomy.format(shot_sample))

        # Second sample: asset-style context data.
        asset_sample = {
            "root": projects_root,
            "project": {"name": "D001_projectsy",
                        "code": "prjY"},
            "ext": "abc",
            "version": 1,
            "task": "lookdev",
            "asset": "bob",
            "app": "maya",
            "hierarchy": "ep101/sq01/bob"
        }
        filled = context.data["anatomy"].format(asset_sample)
        self.log.info(filled["work"]["folder"])

View file

@ -35,7 +35,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
"subset": subset,
"asset": os.getenv("AVALON_ASSET", None),
"label": subset,
"publish": False,
"publish": True,
"family": 'workfile',
"families": ['workfile'],
"setMembers": [current_file]

View file

@ -1,6 +1,7 @@
import os
import sys
import json
import copy
import tempfile
import contextlib
import subprocess
@ -330,10 +331,9 @@ class ExtractLook(pype.api.Extractor):
maya_path))
def resource_destination(self, instance, filepath, do_maketx):
anatomy = instance.context.data["anatomy"]
self.create_destination_template(instance, anatomy)
resources_dir = instance.data["resourcesDir"]
# Compute destination location
basename, ext = os.path.splitext(os.path.basename(filepath))
@ -343,7 +343,7 @@ class ExtractLook(pype.api.Extractor):
ext = ".tx"
return os.path.join(
instance.data["assumedDestination"], "resources", basename + ext
resources_dir, basename + ext
)
def _process_texture(self, filepath, do_maketx, staging, linearise, force):
@ -407,97 +407,3 @@ class ExtractLook(pype.api.Extractor):
return converted, COPY, texture_hash
return filepath, COPY, texture_hash
    def create_destination_template(self, instance, anatomy):
        """Create a filepath based on the current data available

        Looks up the project, asset, subset and latest version in the
        database, computes the next version number and stores the assumed
        template data, template and destination folder on the instance
        (``assumedTemplateData``, ``template``, ``assumedDestination``).

        Example template:
            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
            {subset}.{representation}

        Args:
            instance: the instance to publish
            anatomy: the collected anatomy object (provides `templates`
                and `format`)

        Returns:
            None: results are written onto ``instance.data``.
        """
        # get all the stuff from the database
        subset_name = instance.data["subset"]
        self.log.info(subset_name)
        asset_name = instance.data["asset"]
        project_name = api.Session["AVALON_PROJECT"]
        a_template = anatomy.templates

        project = io.find_one(
            {
                "type": "project",
                "name": project_name
            },
            projection={"config": True, "data": True}
        )
        template = a_template["publish"]["path"]
        # anatomy = instance.context.data['anatomy']
        asset = io.find_one({
            "type": "asset",
            "name": asset_name,
            "parent": project["_id"]
        })
        assert asset, ("No asset found by the name '{}' "
                       "in project '{}'").format(asset_name, project_name)
        silo = asset.get("silo")

        subset = io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset["_id"]
        })

        # assume there is no version yet, we start at `1`
        version = None
        version_number = 1
        if subset is not None:
            version = io.find_one(
                {
                    "type": "version",
                    "parent": subset["_id"]
                },
                sort=[("name", -1)]
            )

        # if there is a subset there ought to be version
        if version is not None:
            version_number += version["name"]

        # An explicit version on the instance overrides the computed one.
        if instance.data.get("version"):
            version_number = int(instance.data.get("version"))

        padding = int(a_template["render"]["padding"])

        hierarchy = asset["data"]["parents"]
        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = "/".join(hierarchy)

        template_data = {
            "root": api.Session["AVALON_PROJECTS"],
            "project": {"name": project_name, "code": project["data"]["code"]},
            "silo": silo,
            "family": instance.data["family"],
            "asset": asset_name,
            "subset": subset_name,
            # Placeholder frame token: hash characters matching the padding.
            "frame": ("#" * padding),
            "version": version_number,
            "hierarchy": hierarchy,
            # Placeholder; the real representation is not known yet.
            "representation": "TEMP",
        }

        instance.data["assumedTemplateData"] = template_data
        self.log.info(template_data)
        instance.data["template"] = template
        # We take the parent folder of representation 'filepath'
        instance.data["assumedDestination"] = os.path.dirname(
            anatomy.format(template_data)["publish"]["path"]
        )

View file

@ -110,15 +110,7 @@ class ExtractYetiRig(pype.api.Extractor):
self.log.info("Writing metadata file")
# Create assumed destination folder for imageSearchPath
assumed_temp_data = instance.data["assumedTemplateData"]
template = instance.data["template"]
template_formatted = template.format(**assumed_temp_data)
destination_folder = os.path.dirname(template_formatted)
image_search_path = os.path.join(destination_folder, "resources")
image_search_path = os.path.normpath(image_search_path)
image_search_path = resources_dir = instance.data["resourcesDir"]
settings = instance.data.get("rigsettings", None)
if settings:

View file

@ -52,9 +52,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))
# get version to instance for integration
instance.data['version'] = instance.context.data.get(
"version", pype.get_version_from_path(nuke.root().name()))
# # get version to instance for integration
# instance.data['version'] = instance.context.data.get(
# "version", pype.get_version_from_path(nuke.root().name()))
self.log.debug('Write Version: %s' % instance.data('version'))
@ -92,16 +92,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
# Add version data to instance
version_data = {
"handles": handle_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame + handle_start,
"frameEnd": last_frame - handle_end,
"version": int(instance.data['version']),
"colorspace": node["colorspace"].value(),
"families": ["render"],
"subset": instance.data["subset"],
"fps": instance.context.data["fps"]
}
instance.data["family"] = "write"

View file

@ -17,7 +17,7 @@ class CollectClips(api.ContextPlugin):
self.log.debug("Created `assetsShared` in context")
context.data["assetsShared"] = dict()
projectdata = context.data["projectData"]
projectdata = context.data["projectEntity"]["data"]
version = context.data.get("version", "001")
sequence = context.data.get("activeSequence")
selection = context.data.get("selection")

View file

@ -2,6 +2,7 @@
import os
import json
import re
import copy
import pyblish.api
import tempfile
from avalon import io, api
@ -75,9 +76,11 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
)
data["source"] = data["sourcePath"]
# WARNING instance should not be created in Extractor!
# create new instance
instance = instance.context.create_instance(**data)
# TODO replace line below with `instance.data["resourcesDir"]`
# when instance is created during collection part
dst_dir = self.resource_destination_dir(instance)
# change paths in effects to files
@ -144,103 +147,109 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
return (v, dst)
def resource_destination_dir(self, instance):
anatomy = instance.context.data['anatomy']
self.create_destination_template(instance, anatomy)
# WARNING this is from `collect_instance_anatomy_data.py`
anatomy_data = copy.deepcopy(instance.context.data["anatomyData"])
project_entity = instance.context.data["projectEntity"]
context_asset_entity = instance.context.data["assetEntity"]
return os.path.join(
instance.data["assumedDestination"],
"resources"
)
def create_destination_template(self, instance, anatomy):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
a_template = anatomy.templates
if context_asset_entity["name"] == asset_name:
asset_entity = context_asset_entity
project = io.find_one(
{
"type": "project",
"name": project_name
},
projection={"config": True, "data": True}
)
else:
asset_entity = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project_entity["_id"]
})
template = a_template['publish']['path']
# anatomy = instance.context.data['anatomy']
subset_name = instance.data["subset"]
version_number = instance.data.get("version")
latest_version = None
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
if asset_entity:
subset_entity = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset_entity["_id"]
})
if subset_entity is None:
self.log.debug("Subset entity does not exist yet.")
else:
version_entity = io.find_one(
{
"type": "version",
"parent": subset_entity["_id"]
},
sort=[("name", -1)]
)
if version_entity:
latest_version = version_entity["name"]
if version_number is None:
version_number = 1
if latest_version is not None:
version_number += int(latest_version)
anatomy_data.update({
"asset": asset_name,
"family": instance.data["family"],
"subset": subset_name,
"version": version_number
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
resolution_width = instance.data.get("resolutionWidth")
if resolution_width:
anatomy_data["resolution_width"] = resolution_width
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
resolution_height = instance.data.get("resolutionHeight")
if resolution_height:
anatomy_data["resolution_height"] = resolution_height
fps = instance.data.get("fps")
if resolution_height:
anatomy_data["fps"] = fps
instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity
instance.data["anatomyData"] = anatomy_data
instance.data["latestVersion"] = latest_version
instance.data["version"] = version_number
# WARNING this is from `collect_resources_path.py`
anatomy = instance.context.data["anatomy"]
template_data = copy.deepcopy(instance.data["anatomyData"])
# This is for cases of Deprecated anatomy without `folder`
# TODO remove when all clients have solved this issue
template_data.update({
"frame": "FRAME_TEMP",
"representation": "TEMP"
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
anatomy_filled = anatomy.format(template_data)
# if there is a subset there ought to be version
if version is not None:
version_number += version["name"]
if "folder" in anatomy.templates["publish"]:
publish_folder = anatomy_filled["publish"]["folder"]
else:
# solve deprecated situation when `folder` key is not underneath
# `publish` anatomy
project_name = api.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have set `folder`"
" key underneath `publish` (in global of for project `{}`)."
).format(project_name))
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
file_path = anatomy_filled["publish"]["path"]
# Directory
publish_folder = os.path.dirname(file_path)
padding = int(a_template['render']['padding'])
publish_folder = os.path.normpath(publish_folder)
resources_folder = os.path.join(publish_folder, "resources")
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = "/".join(hierarchy)
instance.data["publishDir"] = publish_folder
instance.data["resourcesDir"] = resources_folder
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"frame": ('#' * padding),
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template
# We take the parent folder of representation 'filepath'
instance.data["assumedDestination"] = os.path.dirname(
anatomy.format(template_data)["publish"]["path"]
)
return resources_folder