Merge remote-tracking branch 'origin/ftrack-all-jakub' into sync-avalon

# Conflicts:
#	pype/plugins/global/publish/collect_deadline_user.py
#	pype/plugins/global/publish/submit_publish_job.py

fixed conflicts
Milan Kolar 2018-10-16 22:36:38 +02:00
commit dbe6203318
420 changed files with 108195 additions and 499 deletions

pype/plugins/global/publish/collect_deadline_user.py

@@ -34,8 +34,17 @@ class CollectDeadlineUser(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder + 0.499
    label = "Deadline User"
<<<<<<< HEAD
    hosts = ['maya', 'fusion']
    families = ["studio.renderlayer", "studio.saver.deadline"]
=======
    hosts = ['maya', 'fusion', 'nuke']
    families = [
        "renderlayer",
        "saver.deadline",
        "imagesequence"
    ]
>>>>>>> origin/ftrack-all-jakub

    def process(self, context):
        """Inject the current working file"""
@@ -49,4 +58,3 @@ class CollectDeadlineUser(pyblish.api.ContextPlugin):
        self.log.info("Found Deadline user: {}".format(user))
        context.data['deadlineUser'] = user
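
Note: judging by the line counts in the hunk header, the conflict markers above were committed as-is rather than resolved. A plausible resolution, mirroring the family names picked for submit_publish_job.py further down in this commit, would be the following sketch (an assumption, not the committed code):

    # Hypothetical resolution of the conflicted attributes, not in the repo.
    hosts = ['maya', 'fusion', 'nuke']
    families = [
        "studio.renderlayer",
        "studio.saver.deadline",
        "studio.imagesequence"
    ]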


@@ -88,6 +88,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder
    targets = ["filesequence"]
    label = "File Sequences"
    hosts = ['maya']

    def process(self, context):


@@ -0,0 +1,126 @@
import os
import json
import re

import pyblish.api
from config.vendor import clique


class CollectJSON(pyblish.api.ContextPlugin):
    """ Collecting the json files in current directory. """

    label = "JSON"
    order = pyblish.api.CollectorOrder

    def version_get(self, string, prefix):
        """ Extract version information from filenames. Code from Foundry's
        nukescripts.version_get()
        """
        regex = r"[/_.]{}\d+".format(prefix)
        matches = re.findall(regex, string, re.IGNORECASE)

        if not len(matches):
            msg = "No '_{}#' found in '{}'".format(prefix, string)
            raise ValueError(msg)

        return matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group()

    def process(self, context):
        current_file = context.data["currentFile"]

        # Skip if current file is not a directory
        if not os.path.isdir(current_file):
            return

        # Traverse directory and collect collections from json files.
        instances = []
        for root, dirs, files in os.walk(current_file):
            for f in files:
                if f.endswith(".json"):
                    with open(os.path.join(root, f)) as json_data:
                        for data in json.load(json_data):
                            instances.append(data)

        # Validate instance based on supported families.
        valid_families = ["img", "cache", "scene", "mov"]
        valid_data = []
        for data in instances:
            families = data.get("families", []) + [data["family"]]
            family_type = list(set(families) & set(valid_families))
            if family_type:
                valid_data.append(data)

        # Create existing output instance.
        scanned_dirs = []
        files = []
        collections = []
        for data in valid_data:
            if "collection" not in data.keys():
                continue
            if data["collection"] is None:
                continue

            instance_collection = clique.parse(data["collection"])

            try:
                version = self.version_get(
                    os.path.basename(instance_collection.format()), "v"
                )[1]
            except ValueError:
                # Ignore any output that is not versioned
                continue

            # Getting collections of all previous versions and current version
            for count in range(1, int(version) + 1):
                # Generate collection
                version_string = "v" + str(count).zfill(len(version))
                head = instance_collection.head.replace(
                    "v" + version, version_string
                )
                collection = clique.Collection(
                    head=head.replace("\\", "/"),
                    padding=instance_collection.padding,
                    tail=instance_collection.tail
                )
                collection.version = count

                # Scan collection directory
                scan_dir = os.path.dirname(collection.head)
                if scan_dir not in scanned_dirs and os.path.exists(scan_dir):
                    for f in os.listdir(scan_dir):
                        file_path = os.path.join(scan_dir, f)
                        files.append(file_path.replace("\\", "/"))
                    scanned_dirs.append(scan_dir)

                # Match files to collection and add
                for f in files:
                    if collection.match(f):
                        collection.add(f)

                # Skip if no files were found in the collection
                if not list(collection):
                    continue

                # Skip existing collections
                if collection in collections:
                    continue

                instance = context.create_instance(name=data["name"])
                version = self.version_get(
                    os.path.basename(collection.format()), "v"
                )[1]

                basename = os.path.basename(collection.format())
                instance.data["label"] = "{0} - {1}".format(
                    data["name"], basename
                )

                families = data.get("families", []) + [data["family"]]
                family = list(set(valid_families) & set(families))[0]
                instance.data["family"] = family
                instance.data["families"] = ["output"]
                instance.data["collection"] = collection
                instance.data["version"] = int(version)
                instance.data["publish"] = False

                collections.append(collection)
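
For orientation, a minimal sketch of how the two building blocks above behave. The filename and collection string are invented, and the pip-installable clique package is assumed to stand in for the vendored config.vendor.clique:

import re

import clique  # assumption: pip package standing in for config.vendor.clique

# version_get("render_v012.1001.exr", "v") matches "[/_.]v<digits>" and returns
# the prefix letter plus the zero-padded digits: ("v", "012").
matches = re.findall(r"[/_.]v\d+", "render_v012.1001.exr", re.IGNORECASE)
print(matches[-1][1], re.search(r"\d+", matches[-1]).group())  # v 012

# clique.parse() turns the serialized collection string from the JSON back into
# a Collection whose head/padding/tail the collector rewrites to probe v001..v012.
collection = clique.parse("shots/sh010/render_v012.%04d.exr [1001-1010]")
print(collection.head, collection.padding, collection.tail)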


@@ -0,0 +1,49 @@
import os
import json
import datetime
import time

import pyblish.api
from config.vendor import clique


class ExtractJSON(pyblish.api.ContextPlugin):
    """ Extract all instances to a serialized json file. """

    order = pyblish.api.IntegratorOrder
    label = "JSON"

    def process(self, context):
        workspace = os.path.join(
            os.path.dirname(context.data["currentFile"]), "workspace",
            "instances")

        if not os.path.exists(workspace):
            os.makedirs(workspace)

        output_data = []
        for instance in context:
            data = {}
            for key, value in instance.data.iteritems():
                if isinstance(value, clique.Collection):
                    value = value.format()

                try:
                    json.dumps(value)
                    data[key] = value
                except TypeError:
                    msg = "\"{0}\"".format(value)
                    msg += " in instance.data[\"{0}\"]".format(key)
                    msg += " could not be serialized."
                    self.log.debug(msg)

            output_data.append(data)

        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime("%Y%m%d-%H%M%S")
        filename = timestamp + "_instances.json"
        with open(os.path.join(workspace, filename), "w") as outfile:
            outfile.write(json.dumps(output_data, indent=4, sort_keys=True))
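
The file written above is what CollectJSON later walks and reloads. A rough sketch of one entry, with every value invented for illustration:

# Hypothetical entry in <workfile dir>/workspace/instances/<timestamp>_instances.json;
# keys mirror the instance.data fields that survive the json.dumps() check above.
example_entry = {
    "name": "renderMain",
    "family": "img",
    "families": ["img"],
    "label": "renderMain - render_v001.%04d.exr [1001-1010]",
    "collection": "render_v001.%04d.exr [1001-1010]",  # stored via Collection.format()
    "publish": True,
}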

pype/plugins/global/publish/submit_publish_job.py

@@ -1,6 +1,5 @@
import os
import json
import pprint
import re
from avalon import api, io
@@ -12,7 +11,7 @@ import pyblish.api
def _get_script():
    """Get path to the image sequence script"""
    try:
        from pype.scripts import publish_filesequence
        from pype.fusion.scripts import publish_filesequence
    except Exception as e:
        raise RuntimeError("Expected module 'publish_imagesequence'"
                           "to be available")
@@ -122,8 +121,14 @@ class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
    label = "Submit image sequence jobs to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
    hosts = ["fusion", "maya"]
    families = ["studio.saver.deadline", "studio.renderlayer"]
    hosts = ["fusion", "maya", "nuke"]
    families = [
        "studio.saver.deadline",
        "studio.renderlayer",
        "studio.imagesequence"
    ]

    def process(self, instance):
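
With the merged attribute lists this submitter now also picks up Nuke image-sequence renders. Purely as an illustration (the instance name and values are invented), an instance it would match could be set up like this:

import pyblish.api

# Illustration only: an instance shaped to match the merged hosts/families of
# SubmitDependentImageSequenceJobDeadline; all values here are invented.
context = pyblish.api.Context()
instance = context.create_instance("renderMain")
instance.data["family"] = "studio.imagesequence"
instance.data["families"] = ["studio.imagesequence"]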


@@ -6,7 +6,7 @@ class ValidateCurrentSaveFile(pyblish.api.ContextPlugin):
    label = "Validate File Saved"
    order = pyblish.api.ValidatorOrder - 0.1
    hosts = ["maya", "houdini"]
    hosts = ["maya", "houdini", "nuke"]

    def process(self, context):