merging all standalone plugins to globals

This commit is contained in:
Milan Kolar 2019-05-27 20:49:37 +01:00
parent 9596e33b77
commit 1410c00238
8 changed files with 9 additions and 959 deletions


@@ -16,6 +16,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
label = "Collect Context - SA Publish"
order = pyblish.api.CollectorOrder - 0.49
hosts = ["shell"]
def process(self, context):
# get json paths from os and load them


@@ -1,41 +0,0 @@
import os
import pyblish.api
try:
import ftrack_api_old as ftrack_api
except Exception:
import ftrack_api
class CollectFtrackApi(pyblish.api.ContextPlugin):
""" Collects an ftrack session and the current task id. """
order = pyblish.api.CollectorOrder
label = "Collect Ftrack Api"
def process(self, context):
# Collect session
session = ftrack_api.Session()
context.data["ftrackSession"] = session
# Collect task
project = os.environ.get('AVALON_PROJECT', '')
asset = os.environ.get('AVALON_ASSET', '')
task = os.environ.get('AVALON_TASK', None)
self.log.debug(task)
if task:
result = session.query('Task where\
project.full_name is "{0}" and\
name is "{1}" and\
parent.name is "{2}"'.format(project, task, asset)).one()
context.data["ftrackTask"] = result
else:
result = session.query('TypedContext where\
project.full_name is "{0}" and\
name is "{1}"'.format(project, asset)).one()
context.data["ftrackEntity"] = result
self.log.info(result)
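
For readers unfamiliar with the query API this collector relies on, here is a minimal standalone sketch of the same task lookup. It assumes a reachable ftrack server with credentials in the usual FTRACK_* environment variables; the fallback project, asset, and task names are invented:

import os
import ftrack_api

# Session picks up FTRACK_SERVER / FTRACK_API_USER / FTRACK_API_KEY from env.
session = ftrack_api.Session()

project = os.environ.get('AVALON_PROJECT', 'demo_project')  # invented default
asset = os.environ.get('AVALON_ASSET', 'char_hero')         # invented default
task = os.environ.get('AVALON_TASK', 'modeling')            # invented default

entity = session.query(
    'Task where project.full_name is "{0}" and '
    'name is "{1}" and parent.name is "{2}"'.format(project, task, asset)
).one()
print(entity['name'])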


@@ -1,17 +0,0 @@
import pype.api as pype
from pypeapp import Anatomy
import pyblish.api
class CollectTemplates(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder
label = "Collect Templates"
def process(self, context):
# pype.load_data_from_templates()
context.data['anatomy'] = Anatomy()
self.log.info("Anatomy templates collected...")


@@ -1,12 +0,0 @@
import pyblish.api
from avalon import api
class CollectTime(pyblish.api.ContextPlugin):
"""Store global time at the time of publish"""
label = "Collect Current Time"
order = pyblish.api.CollectorOrder
def process(self, context):
context.data["time"] = api.time()


@@ -1,471 +0,0 @@
import os
import logging
import shutil
import clique
import errno
import pyblish.api
from avalon import api, io
from avalon.vendor import filelink
log = logging.getLogger(__name__)
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Resolve any dependency issius
This plug-in resolves any paths which, if not updated might break
the published file.
The order of families is important, when working with lookdev you want to
first publish the texture, update the texture paths in the nodes and then
publish the shading network. Same goes for file dependent assets.
Requirements for instance to be correctly integrated
instance.data['representations'] - must be a list and each member
must be a dictionary with following data:
'files': list of filenames for sequence, string for single file.
Only the filename is allowed, without the folder path.
'stagingDir': "path/to/folder/with/files"
'name': representation name (usually the same as extension)
'ext': file extension
"""
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = ["animation",
"camera",
"look",
"mayaAscii",
"model",
"pointcache",
"vdbcache",
"setdress",
"assembly",
"layout",
"rig",
"vrayproxy",
"yetiRig",
"yeticache",
"nukescript",
"review",
"workfile",
"scene",
"ass"]
exclude_families = ["clip"]
def process(self, instance):
if [ef for ef in self.exclude_families
if instance.data["family"] in ef]:
return
self.register(instance)
self.log.info("Integrating Asset in to the database ...")
self.log.info("instance.data: {}".format(instance.data))
if instance.data.get('transfer', True):
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
LOCATION = api.Session["AVALON_LOCATION"]
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
if not stagingdir:
self.log.info('''{} is missing reference to staging
directory. Will try to get it from
representation.'''.format(instance))
# extra check if stagingDir actually exists and is available
self.log.debug("Establishing staging directory @ %s" % stagingdir)
# Ensure at least one file is set up for transfer in staging dir.
repres = instance.data.get("representations", None)
assert repres, "Instance has no files to transfer"
assert isinstance(repres, (list, tuple)), (
"Instance 'representations' must be a list, got: {0}".format(repres)
)
project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset",
"name": ASSET,
"parent": project["_id"]})
assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)
subset = self.get_subset(asset, instance)
# get next version
latest_version = io.find_one({"type": "version",
"parent": subset["_id"]},
{"name": True},
sort=[("name", -1)])
next_version = 1
if latest_version is not None:
next_version += latest_version["name"]
if instance.data.get('version'):
next_version = int(instance.data.get('version'))
# self.log.info("Verifying version from assumed destination")
# assumed_data = instance.data["assumedTemplateData"]
# assumed_version = assumed_data["version"]
# if assumed_version != next_version:
# raise AttributeError("Assumed version 'v{0:03d}' does not match"
# "next version in database "
# "('v{1:03d}')".format(assumed_version,
# next_version))
self.log.debug("Next version: v{0:03d}".format(next_version))
version_data = self.create_version_data(context, instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)
self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id
instance.data['version'] = version['name']
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
root = api.registered_root()
hierarchy = ""
parents = io.find_one({
"type": 'asset',
"name": ASSET
})['data']['parents']
if parents:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*parents)
template_data = {"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
"silo": asset['silo'],
"task": api.Session["AVALON_TASK"],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": int(version["name"]),
"hierarchy": hierarchy}
anatomy = instance.context.data['anatomy']
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
destination_list = []
template_name = 'publish'
if 'transfers' not in instance.data:
instance.data['transfers'] = []
for idx, repre in enumerate(instance.data["representations"]):
# Collection
# _______
# |______|\
# | |\|
# | ||
# | ||
# | ||
# |_______|
#
files = repre['files']
if repre.get('stagingDir'):
stagingdir = repre['stagingDir']
if repre.get('anatomy_template'):
template_name = repre['anatomy_template']
template = anatomy.templates[template_name]["path"]
if isinstance(files, list):
src_collections, remainder = clique.assemble(files)
self.log.debug(
"dst_collections: {}".format(str(src_collections)))
src_collection = src_collections[0]
# Assert that each member has identical suffix
src_head = src_collection.format("{head}")
src_tail = ext = src_collection.format("{tail}")
test_dest_files = list()
for i in [1, 2]:
template_data["representation"] = repre['ext']
template_data["frame"] = src_collection.format(
"{padding}") % i
anatomy_filled = anatomy.format(template_data)
test_dest_files.append(
anatomy_filled[template_name]["path"])
dst_collections, remainder = clique.assemble(test_dest_files)
dst_collection = dst_collections[0]
dst_head = dst_collection.format("{head}")
dst_tail = dst_collection.format("{tail}")
instance.data["representations"][idx]['published_path'] = dst_collection.format()
for i in src_collection.indexes:
src_padding = src_collection.format("{padding}") % i
src_file_name = "{0}{1}{2}".format(
src_head, src_padding, src_tail)
dst_padding = dst_collection.format("{padding}") % i
dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail)
src = os.path.join(stagingdir, src_file_name)
# src = src_file_name
self.log.debug("source: {}".format(src))
instance.data["transfers"].append([src, dst])
else:
# Single file
# _______
# | |\
# | |
# | |
# | |
# |_______|
#
template_data.pop("frame", None)
fname = files
assert not os.path.isabs(fname), (
"Given file name is a full path"
)
_, ext = os.path.splitext(fname)
template_data["representation"] = repre['ext']
src = os.path.join(stagingdir, fname)
# src = fname
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled[template_name]["path"]
instance.data["transfers"].append([src, dst])
# template = anatomy.templates["publish"]["path"]
instance.data["representations"][idx]['published_path'] = dst
representation = {
"schema": "pype:representation-2.0",
"type": "representation",
"parent": version_id,
"name": repre['name'],
"data": {'path': dst, 'template': template},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
# for performance reasons.
"context": {
"root": root,
"project": {"name": PROJECT,
"code": project['data']['code']},
'task': api.Session["AVALON_TASK"],
"silo": asset['silo'],
"asset": ASSET,
"family": instance.data['family'],
"subset": subset["name"],
"version": version["name"],
"hierarchy": hierarchy,
"representation": repre['ext']
}
}
destination_list.append(dst)
instance.data['destination_list'] = destination_list
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
def integrate(self, instance):
"""Move the files
Through `instance.data["transfers"]`
Args:
instance: the instance to integrate
"""
transfers = instance.data.get("transfers", list())
for src, dest in transfers:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
# Produce hardlinked copies
# Note: hardlink can only be produced between two files on the same
# server/disk and editing one of the two will edit both files at once.
# As such it is recommended to only make hardlinks between static files
# to ensure publishes remain safe and non-edited.
hardlinks = instance.data.get("hardlinks", list())
for src, dest in hardlinks:
self.log.info("Hardlinking file .. {} -> {}".format(src, dest))
self.hardlink_file(src, dest)
def copy_file(self, src, dst):
""" Copy given source to destination
Arguments:
src (str): the source file which needs to be copied
dst (str): the destination of the source file
Returns:
None
"""
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
shutil.copy(src, dst)
def hardlink_file(self, src, dst):
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
filelink.create(src, dst, filelink.HARDLINK)
def get_subset(self, asset, instance):
subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]})
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"type": "subset",
"name": subset_name,
"data": {},
"parent": asset["_id"]
}).inserted_id
subset = io.find_one({"_id": _id})
return subset
def create_version(self, subset, version_number, locations, data=None):
""" Copy given source to destination
Args:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
Returns:
dict: collection of data to create a version
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]
return {"schema": "avalon-core:version-2.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}
def create_version_data(self, context, instance):
"""Create the data collection for the version
Args:
context: the current context
instance: the current instance being published
Returns:
dict: the version data to store with the new version
"""
families = []
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
if instance_family is not None:
families.append(instance_family)
families += current_families
self.log.debug("Registered root: {}".format(api.registered_root()))
# create relative source path for DB
try:
source = instance.data['source']
except KeyError:
source = context.data["currentFile"]
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
self.log.debug("Source: {}".format(source))
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get("fps")}
# Include optional data if present in the instance
optionals = [
"startFrame", "endFrame", "step", "handles", "sourceHashes"
]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data[key]
return version_data
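
The sequence handling in register() above leans on the clique library. A self-contained sketch of just those calls, with invented file names:

import clique

files = ["beauty.0001.exr", "beauty.0002.exr", "beauty.0003.exr"]
collections, remainder = clique.assemble(files)
collection = collections[0]

head = collection.format("{head}")        # "beauty."
tail = collection.format("{tail}")        # ".exr"
padding = collection.format("{padding}")  # "%04d" for zero-padded frames
for i in collection.indexes:
    print("{0}{1}{2}".format(head, padding % i, tail))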


@@ -1,315 +0,0 @@
import os
import sys
import pyblish.api
import clique
class IntegrateFtrackApi(pyblish.api.InstancePlugin):
""" Commit components to server. """
order = pyblish.api.IntegratorOrder+0.499
label = "Integrate Ftrack Api"
families = ["ftrack"]
def query(self, entitytype, data):
""" Generate a query expression from data supplied.
If a value is not a string, we'll add the id of the entity to the
query.
Args:
entitytype (str): The type of entity to query.
data (dict): The data to identify the entity.
Returns:
str: String query to use with "session.query"
"""
queries = []
if sys.version_info[0] < 3:
for key, value in data.iteritems():
if not isinstance(value, (basestring, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
else:
for key, value in data.items():
if not isinstance(value, (str, int)):
self.log.info("value: {}".format(value))
if "id" in value.keys():
queries.append(
"{0}.id is \"{1}\"".format(key, value["id"])
)
else:
queries.append("{0} is \"{1}\"".format(key, value))
query = (
"select id from " + entitytype + " where " + " and ".join(queries)
)
self.log.debug(query)
return query
def process(self, instance):
session = instance.context.data["ftrackSession"]
if instance.context.data.get("ftrackTask"):
task = instance.context.data["ftrackTask"]
name = task
parent = task["parent"]
elif instance.context.data.get("ftrackEntity"):
task = None
name = instance.context.data.get("ftrackEntity")['name']
parent = instance.context.data.get("ftrackEntity")
info_msg = "Created new {entity_type} with data: {data}"
info_msg += ", metadata: {metadata}."
# Iterate over components and publish
for data in instance.data.get("ftrackComponentsList", []):
# AssetType
# Get existing entity.
assettype_data = {"short": "upload"}
assettype_data.update(data.get("assettype_data", {}))
self.log.debug("data: {}".format(data))
assettype_entity = session.query(
self.query("AssetType", assettype_data)
).first()
# Create a new entity if none exists.
if not assettype_entity:
assettype_entity = session.create("AssetType", assettype_data)
self.log.debug(
"Created new AssetType with data: ".format(assettype_data)
)
# Asset
# Get existing entity.
asset_data = {
"name": name,
"type": assettype_entity,
"parent": parent,
}
asset_data.update(data.get("asset_data", {}))
asset_entity = session.query(
self.query("Asset", asset_data)
).first()
self.log.info("asset entity: {}".format(asset_entity))
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
asset_metadata = asset_data.pop("metadata", {})
# Create a new entity if none exists.
if not asset_entity:
asset_entity = session.create("Asset", asset_data)
self.log.debug(
info_msg.format(
entity_type="Asset",
data=asset_data,
metadata=asset_metadata
)
)
# Adding metadata
existing_asset_metadata = asset_entity["metadata"]
existing_asset_metadata.update(asset_metadata)
asset_entity["metadata"] = existing_asset_metadata
# AssetVersion
# Get existing entity.
assetversion_data = {
"version": 0,
"asset": asset_entity,
}
if task:
assetversion_data['task'] = task
assetversion_data.update(data.get("assetversion_data", {}))
assetversion_entity = session.query(
self.query("AssetVersion", assetversion_data)
).first()
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
assetversion_metadata = assetversion_data.pop("metadata", {})
# Create a new entity if none exists.
if not assetversion_entity:
assetversion_entity = session.create(
"AssetVersion", assetversion_data
)
self.log.debug(
info_msg.format(
entity_type="AssetVersion",
data=assetversion_data,
metadata=assetversion_metadata
)
)
# Adding metadata
existing_assetversion_metadata = assetversion_entity["metadata"]
existing_assetversion_metadata.update(assetversion_metadata)
assetversion_entity["metadata"] = existing_assetversion_metadata
# Have to commit the version and asset, because the location can't
# determine the final resource identifier without them.
session.commit()
# Component
# Get existing entity.
component_data = {
"name": "main",
"version": assetversion_entity
}
component_data.update(data.get("component_data", {}))
component_entity = session.query(
self.query("Component", component_data)
).first()
component_overwrite = data.get("component_overwrite", False)
location = data.get("component_location", session.pick_location())
# Overwrite existing component data if requested.
if component_entity and component_overwrite:
origin_location = session.query(
"Location where name is \"ftrack.origin\""
).one()
# Removing existing members from location
components = list(component_entity.get("members", []))
components += [component_entity]
for component in components:
for loc in component["component_locations"]:
if location["id"] == loc["location_id"]:
location.remove_component(
component, recursive=False
)
# Deleting existing members on component entity
for member in component_entity.get("members", []):
session.delete(member)
del member
session.commit()
# Reset members in memory
if "members" in component_entity.keys():
component_entity["members"] = []
# Add components to origin location
try:
collection = clique.parse(data["component_path"])
except ValueError:
# Assume it's a single file
# Changing file type
name, ext = os.path.splitext(data["component_path"])
component_entity["file_type"] = ext
origin_location.add_component(
component_entity, data["component_path"]
)
else:
# Changing file type
component_entity["file_type"] = collection.format("{tail}")
# Create member components for sequence.
for member_path in collection:
size = 0
try:
size = os.path.getsize(member_path)
except OSError:
pass
name = collection.match(member_path).group("index")
member_data = {
"name": name,
"container": component_entity,
"size": size,
"file_type": os.path.splitext(member_path)[-1]
}
component = session.create(
"FileComponent", member_data
)
origin_location.add_component(
component, member_path, recursive=False
)
component_entity["members"].append(component)
# Add components to location.
location.add_component(
component_entity, origin_location, recursive=True
)
data["component"] = component_entity
msg = "Overwriting Component with path: {0}, data: {1}, "
msg += "location: {2}"
self.log.info(
msg.format(
data["component_path"],
component_data,
location
)
)
# Extracting metadata, and adding after entity creation. This is
# due to a ftrack_api bug where you can't add metadata on creation.
component_metadata = component_data.pop("metadata", {})
# Create new component if none exists.
new_component = False
if not component_entity:
component_entity = assetversion_entity.create_component(
data["component_path"],
data=component_data,
location=location
)
data["component"] = component_entity
msg = "Created new Component with path: {0}, data: {1}"
msg += ", metadata: {2}, location: {3}"
self.log.info(
msg.format(
data["component_path"],
component_data,
component_metadata,
location
)
)
new_component = True
# Adding metadata
existing_component_metadata = component_entity["metadata"]
existing_component_metadata.update(component_metadata)
component_entity["metadata"] = existing_component_metadata
# if component_data['name'] = 'ftrackreview-mp4-mp4':
# assetversion_entity["thumbnail_id"]
# Setting assetversion thumbnail
if data.get("thumbnail", False):
assetversion_entity["thumbnail_id"] = component_entity["id"]
# Inform user about no changes to the database.
if (component_entity and not component_overwrite and
not new_component):
data["component"] = component_entity
self.log.info(
"Found existing component, and no request to overwrite. "
"Nothing has been changed."
)
else:
# Commit changes.
session.commit()
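
To make the query() helper above concrete: for a dict mixing plain values with entity-like values it produces an expression such as the following (the id is invented):

data = {
    "name": "main",
    "version": {"id": "a1b2c3d4"},  # entity-like values contribute their id
}
# self.query("Component", data) builds:
#   select id from Component where name is "main" and version.id is "a1b2c3d4"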


@@ -1,95 +0,0 @@
import pyblish.api
import os
import json
class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
"""Collect ftrack component data
Add ftrack component list to instance.
"""
order = pyblish.api.IntegratorOrder + 0.48
label = 'Integrate Ftrack Component'
families = ["ftrack"]
family_mapping = {'camera': 'cam',
'look': 'look',
'mayaAscii': 'scene',
'model': 'geo',
'rig': 'rig',
'setdress': 'setdress',
'pointcache': 'cache',
'write': 'img',
'render': 'render',
'nukescript': 'comp',
'review': 'mov'}
def process(self, instance):
self.log.debug('instance {}'.format(instance))
version_number = 0
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
family = instance.data['family'].lower()
asset_type = self.family_mapping.get(family, '')
componentList = []
ft_session = instance.context.data["ftrackSession"]
for comp in instance.data['representations']:
self.log.debug('component {}'.format(comp))
if comp.get('thumbnail'):
location = ft_session.query(
'Location where name is "ftrack.server"').one()
component_data = {
"name": "thumbnail" # Default component name is "main".
}
elif comp.get('preview'):
if not comp.get('startFrameReview'):
comp['startFrameReview'] = comp['startFrame']
if not comp.get('endFrameReview'):
comp['endFrameReview'] = comp['endFrame']
location = ft_session.query(
'Location where name is "ftrack.server"').one()
component_data = {
# Default component name is "main".
"name": "ftrackreview-mp4",
"metadata": {'ftr_meta': json.dumps({
'frameIn': int(comp['startFrameReview']),
'frameOut': int(comp['endFrameReview']),
'frameRate': comp['frameRate']})}
}
else:
component_data = {
"name": comp['name']
}
location = ft_session.query(
'Location where name is "ftrack.unmanaged"').one()
self.log.debug('location {}'.format(location))
componentList.append({"assettype_data": {
"short": asset_type,
},
"asset_data": {
"name": instance.data["subset"],
},
"assetversion_data": {
"version": version_number,
},
"component_data": component_data,
"component_path": comp['published_path'],
'component_location': location,
"component_overwrite": False,
"thumbnail": comp['thumbnail']
}
)
self.log.debug('componentsList: {}'.format(str(componentList)))
instance.data["ftrackComponentsList"] = componentList


@@ -16,19 +16,19 @@ import pyblish.api
# Registers Global pyblish plugins
# pype.install()
pype.install()
# Registers Standalone pyblish plugins
PUBLISH_PATH = os.path.sep.join(
[pype.PLUGINS_DIR, 'standalonepublish', 'publish']
)
pyblish.api.register_plugin_path(PUBLISH_PATH)
# # Registers Standalone pyblish plugins
# PUBLISH_PATH = os.path.sep.join(
# [pype.PLUGINS_DIR, 'ftrack', 'publish']
# [pype.PLUGINS_DIR, 'standalonepublish', 'publish']
# )
# pyblish.api.register_plugin_path(PUBLISH_PATH)
# Registers Ftrack pyblish plugins
PUBLISH_PATH = os.path.sep.join(
[pype.PLUGINS_DIR, 'ftrack', 'publish']
)
pyblish.api.register_plugin_path(PUBLISH_PATH)
def set_context(project, asset, task, app):
''' Sets context for pyblish (must be done before pyblish is launched)