Deadline in Pype 3 - added new implementation of Event listener

Added missing custom plugins
This commit is contained in:
Petr Kalis 2021-02-25 18:49:43 +01:00
parent ae5163ad89
commit 1dcddc9b4e
16 changed files with 8467 additions and 0 deletions

View file

@ -0,0 +1,37 @@
[State]
Type=Enum
Items=Global Enabled;Opt-In;Disabled
Category=Options
CategoryOrder=0
CategoryIndex=0
Label=State
Default=Global Enabled
Description=How this event plug-in should respond to events. If Global, all jobs and slaves will trigger the events for this plugin. If Opt-In, jobs and slaves can choose to trigger the events for this plugin. If Disabled, no events are triggered for this plugin.
[PythonSearchPaths]
Type=MultiLineMultiFolder
Label=Additional Python Search Paths
Category=Options
CategoryOrder=0
CategoryIndex=1
Default=
Description=The list of paths to append to the PYTHONPATH environment variable. This allows the Python job to find custom modules in non-standard locations.
[LoggingLevel]
Type=Enum
Label=Logging Level
Category=Options
CategoryOrder=0
CategoryIndex=2
Items=DEBUG;INFO;WARNING;ERROR
Default=DEBUG
Description=Logging level where printing will start.
[PypeExecutable]
Type=MultiLineMultiFolder
Label=Path to Pype executable dir
Category=Job Plugins
CategoryOrder=1
CategoryIndex=1
Default=
Description=Directory that contains the Pype executable (pype_console). Only the directory path is needed, not the executable itself.

View file

@ -0,0 +1,264 @@
import os
import sys
import logging
import json
import subprocess
import platform
import os
import tempfile
import time
import Deadline.Events
import Deadline.Scripting
from System import Environment
def GetDeadlineEventListener():
    """Deadline entry point: build and return the Pype event listener."""
    listener = PypeEventListener()
    return listener
def CleanupDeadlineEventListener(eventListener):
    """Deadline entry point: let *eventListener* release its callbacks."""
    eventListener.Cleanup()
class PypeEventListener(Deadline.Events.DeadlineEventListener):
    """Deadline event plugin that injects the Pype environment into jobs.

    Called on every Deadline plugin event; pulls environment variables from
    Pype (via ``pype_console extractenvironments``) and stores them on the
    job so the render process picks them up.

    Expects that the job already contains the env vars:
        AVALON_PROJECT
        AVALON_ASSET
        AVALON_TASK
        AVALON_APP_NAME
    Without these only the global environment is pulled from Pype.

    Configure 'Path to Pype executable dir' in Deadline's
    'Tools > Configure Events > pype'. Only the directory path is needed.
    """

    # Class-level default; the instance attribute set in __init__ shadows it.
    ALREADY_INJECTED = False

    def __init__(self):
        # Subscribe to every job/slave/machine event Deadline exposes.
        self.OnJobSubmittedCallback += self.OnJobSubmitted
        self.OnJobStartedCallback += self.OnJobStarted
        self.OnJobFinishedCallback += self.OnJobFinished
        self.OnJobRequeuedCallback += self.OnJobRequeued
        self.OnJobFailedCallback += self.OnJobFailed
        self.OnJobSuspendedCallback += self.OnJobSuspended
        self.OnJobResumedCallback += self.OnJobResumed
        self.OnJobPendedCallback += self.OnJobPended
        self.OnJobReleasedCallback += self.OnJobReleased
        self.OnJobDeletedCallback += self.OnJobDeleted
        self.OnJobErrorCallback += self.OnJobError
        self.OnJobPurgedCallback += self.OnJobPurged

        self.OnHouseCleaningCallback += self.OnHouseCleaning
        self.OnRepositoryRepairCallback += self.OnRepositoryRepair

        self.OnSlaveStartedCallback += self.OnSlaveStarted
        self.OnSlaveStoppedCallback += self.OnSlaveStopped
        self.OnSlaveIdleCallback += self.OnSlaveIdle
        self.OnSlaveRenderingCallback += self.OnSlaveRendering
        self.OnSlaveStartingJobCallback += self.OnSlaveStartingJob
        self.OnSlaveStalledCallback += self.OnSlaveStalled

        self.OnIdleShutdownCallback += self.OnIdleShutdown
        self.OnMachineStartupCallback += self.OnMachineStartup
        self.OnThermalShutdownCallback += self.OnThermalShutdown
        self.OnMachineRestartCallback += self.OnMachineRestart

        self.ALREADY_INJECTED = False

    def Cleanup(self):
        """Release every callback registered in __init__ (Deadline contract)."""
        del self.OnJobSubmittedCallback
        del self.OnJobStartedCallback
        del self.OnJobFinishedCallback
        del self.OnJobRequeuedCallback
        del self.OnJobFailedCallback
        del self.OnJobSuspendedCallback
        del self.OnJobResumedCallback
        del self.OnJobPendedCallback
        del self.OnJobReleasedCallback
        del self.OnJobDeletedCallback
        del self.OnJobErrorCallback
        del self.OnJobPurgedCallback

        del self.OnHouseCleaningCallback
        del self.OnRepositoryRepairCallback

        del self.OnSlaveStartedCallback
        del self.OnSlaveStoppedCallback
        del self.OnSlaveIdleCallback
        del self.OnSlaveRenderingCallback
        del self.OnSlaveStartingJobCallback
        del self.OnSlaveStalledCallback

        del self.OnIdleShutdownCallback
        del self.OnMachineStartupCallback
        del self.OnThermalShutdownCallback
        del self.OnMachineRestartCallback

    def inject_pype_environment(self, job, additonalData=None):
        """Extract the Pype environment and store it on *job*.

        Runs ``pype_console extractenvironments`` which writes a JSON file;
        its key/value pairs are copied into the job environment and the job
        is saved back to the repository. Fails the job on any error.

        Args:
            job: Deadline job object.
            additonalData: unused, kept for backward compatibility
                (was a mutable ``{}`` default; ``None`` avoids the shared
                mutable-default pitfall).

        Raises:
            RuntimeError: on unsupported platform, missing executable or
                non-zero exit code of the extraction process.
        """
        if self.ALREADY_INJECTED:
            self.LogInfo("Environment injected previously")
            return

        # Extend sys.path with configured extra module locations.
        paths = self.GetConfigEntryWithDefault("PythonSearchPaths", "").strip()
        for path in paths.split(";"):
            if not path:
                # splitting an empty config value yields [""]; don't pollute
                # sys.path with an empty entry
                continue
            self.LogInfo("Extending sys.path with: " + str(path))
            sys.path.append(path)

        self.LogInfo("inject_pype_environment start")
        try:
            system_name = platform.system().lower()
            if system_name == "linux":
                pype_command = "pype_console.sh"
            elif system_name == "windows":
                pype_command = "pype_console.exe"
            else:
                # previously fell through and raised a confusing NameError
                raise RuntimeError(
                    "Unsupported platform '{}'".format(platform.system()))

            pype_root = self.GetConfigEntryWithDefault(
                "PypeExecutable", "").strip()

            pype_app = os.path.join(pype_root, pype_command)
            if not os.path.exists(pype_app):
                raise RuntimeError("App '{}' doesn't exist. Fix it in Tools > Configure Events > pype".format(pype_app))

            # tempfile.TemporaryFile cannot be used because of locking;
            # a timestamped folder keeps concurrent injections from clashing.
            export_url = os.path.join(tempfile.gettempdir(),
                                      time.strftime('%Y%m%d%H%M%S'),
                                      'env.json')  # add HHMMSS + delete later
            self.LogInfo("export_url {}".format(export_url))

            additional_args = {
                'project': job.GetJobEnvironmentKeyValue('AVALON_PROJECT'),
                'asset': job.GetJobEnvironmentKeyValue('AVALON_ASSET'),
                'task': job.GetJobEnvironmentKeyValue('AVALON_TASK'),
                'app': job.GetJobEnvironmentKeyValue('AVALON_APP_NAME'),
            }
            self.LogInfo("args::{}".format(additional_args))

            args = [
                pype_app,
                'extractenvironments',
                export_url
            ]
            # Only pass the context flags when the job carries all of them;
            # otherwise only the global environment is extracted.
            if all(additional_args.values()):
                for key, value in additional_args.items():
                    args.append("--{}".format(key))
                    args.append(value)
            self.LogInfo("args::{}".format(args))

            # shell=False: passing an argument LIST with shell=True silently
            # drops the arguments on POSIX (they become shell positionals).
            exit_code = subprocess.call(args)
            if exit_code != 0:
                raise RuntimeError("Environment extraction failed")

            with open(export_url) as fp:
                contents = json.load(fp)
            self.LogInfo("contents::{}".format(contents))
            for key, value in contents.items():
                job.SetJobEnvironmentKeyValue(key, value)

            Deadline.Scripting.RepositoryUtils.SaveJob(job)  # IMPORTANT
            self.ALREADY_INJECTED = True
            os.remove(export_url)

            self.LogInfo("inject_pype_environment end")
        except Exception:
            import traceback
            self.LogInfo(traceback.format_exc())
            self.LogInfo("inject_pype_environment failed")
            Deadline.Scripting.RepositoryUtils.FailJob(job)
            raise

    def updateFtrackStatus(self, job, statusName, createIfMissing=False):
        """Updates version status on ftrack. Not implemented yet."""
        pass

    def OnJobSubmitted(self, job):
        """First submit: inject environment and mark version as queued."""
        self.LogInfo("OnJobSubmitted LOGGING")
        # for 1st time submit
        self.inject_pype_environment(job)
        self.updateFtrackStatus(job, "Render Queued")

    def OnJobStarted(self, job):
        self.LogInfo("OnJobStarted")
        # inject_pype_environment shouldnt be here, too late already
        self.updateFtrackStatus(job, "Rendering")

    def OnJobFinished(self, job):
        self.updateFtrackStatus(job, "Artist Review")

    def OnJobRequeued(self, job):
        self.LogInfo("OnJobRequeued LOGGING")
        self.inject_pype_environment(job)

    def OnJobFailed(self, job):
        pass

    def OnJobSuspended(self, job):
        self.LogInfo("OnJobSuspended LOGGING")
        self.updateFtrackStatus(job, "Render Queued")

    def OnJobResumed(self, job):
        self.LogInfo("OnJobResumed LOGGING")
        self.updateFtrackStatus(job, "Rendering")

    def OnJobPended(self, job):
        self.LogInfo("OnJobPended LOGGING")

    def OnJobReleased(self, job):
        pass

    def OnJobDeleted(self, job):
        pass

    def OnJobError(self, job, task, report):
        # task/report are provided by Deadline; currently only logged.
        self.LogInfo("OnJobError LOGGING")

    def OnJobPurged(self, job):
        pass

    def OnHouseCleaning(self):
        pass

    def OnRepositoryRepair(self, job):
        pass

    def OnSlaveStarted(self, job):
        self.LogInfo("OnSlaveStarted LOGGING")

    def OnSlaveStopped(self, job):
        pass

    def OnSlaveIdle(self, job):
        pass

    def OnSlaveRendering(self, host_name, job):
        self.LogInfo("OnSlaveRendering LOGGING")

    def OnSlaveStartingJob(self, host_name, job):
        self.LogInfo("OnSlaveStartingJob LOGGING")
        # inject params must be here for Resubmits
        self.inject_pype_environment(job)

    def OnSlaveStalled(self, job):
        pass

    def OnIdleShutdown(self, job):
        pass

    def OnMachineStartup(self, job):
        pass

    def OnThermalShutdown(self, job):
        pass

    def OnMachineRestart(self, job):
        pass

View file

@ -0,0 +1,76 @@
# -*- coding: utf-8 -*-
"""Remap pype path and PYPE_METADATA_PATH."""
import platform
from Deadline.Scripting import RepositoryUtils
def pype_command_line(executable, arguments, workingDirectory):
    """Remap paths in the command line argument string.

    Runs Deadline's path mapper over the argument string so that paths
    submitted from another platform resolve on this worker.

    Args:
        executable (str): path to executable
        arguments (str): arguments passed to executable
        workingDirectory (str): working directory path

    Returns:
        Tuple(executable, arguments, workingDirectory)
    """
    ruler = "-" * 40
    print(ruler)
    print("executable: {}".format(executable))
    print("arguments: {}".format(arguments))
    print("workingDirectory: {}".format(workingDirectory))
    print(ruler)
    print("Remapping arguments ...")
    arguments = RepositoryUtils.CheckPathMapping(arguments)
    print("* {}".format(arguments))
    print(ruler)
    return executable, arguments, workingDirectory
def pype(deadlinePlugin):
    """Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars.

    `PYPE_METADATA_FILE` is used on farm to point to rendered data. This path
    originates on the platform from which this job was published. To be able
    to publish on a different platform, this path needs to be remapped.

    `PYPE_PYTHON_EXE` can be used to specify custom location of python
    interpreter to use for Pype. This is remapped also if present even
    though it probably doesn't make much sense.

    Arguments:
        deadlinePlugin: Deadline job plugin passed by Deadline
    """
    def _remap(value):
        # Run Deadline's path mapping, then normalize separators on linux
        # (paths submitted from Windows carry backslashes).
        value = RepositoryUtils.CheckPathMapping(value)
        if platform.system().lower() == "linux":
            value = value.replace("\\", "/")
        return value

    job = deadlinePlugin.GetJob()

    # Both vars only exist on pype publish jobs; other jobs are untouched.
    # The two branches of the original code were identical except for the
    # variable name, so they are folded into one loop.
    for var in ("PYPE_METADATA_FILE", "PYPE_PYTHON_EXE"):
        value = job.GetJobEnvironmentKeyValue(var)
        if not value:
            continue
        value = _remap(value)
        print("- remapping {}: {}".format(var, value))
        job.SetJobEnvironmentKeyValue(var, value)
        deadlinePlugin.SetProcessEnvironmentVariable(var, value)

    deadlinePlugin.ModifyCommandLineCallback += pype_command_line
def __main__(deadlinePlugin):
    """Entry point invoked by Deadline's GlobalJobPreLoad mechanism."""
    pype(deadlinePlugin)

View file

@ -0,0 +1,911 @@
/*
Copyright 2017 Thinkbox Software
All Rights Reserved
This file has the following public functions. Examples of how to use them are below:
get_network_assets
get_filename_attributes_from_scene
get_source_file_paths_from_attr
get_files_to_copy
write_asset_introspection_file_transfer_pairs
gather_source_file_copies
change_to_relative_directory
write_repathing_mel_script_file
repath_assets
find_asset_paths
do_local_asset_caching
Use #1 - Local Asset Caching:
This code is used in production for our "Local Asset Caching". In this case, everything is run on the slave-side, it copies the files, then repaths them.
You can use it directly on the slave-side by calling do_local_asset_caching.
In this case, this file must be executed on the slave-side.
User #2 - Local hot folder synching and remote asset repathing:
This file also contains all the functions needed to do scene introspection and hot folder copying.
This is an example of what we'd do on the submission-side if you wanted to copy all the files into a hot-folder, followed
by creating a script that should run on the slave-side-end which will repath them to the correct place.
It was written for Rodeo FX, but was never put into production.
NOTE: In this case, this MEL script must execute on the SUBMITTER side.
So, if we want to use this code for both 'local asset caching' and this hot-folder thing, we have to have this AssetTools.mel file both on the slave and the submitter.
//Example submitter function:
proc function_to_run_on_submission_for_scene_introspection_and_hot_folder_syncing() {
//This function creates a file called "asset_repathing_script.mel" which must be run on the slave-side to do the repathing.
string $attrNames[0];
string $cacheFileObjectNames[0];
string $yetiAttrNames[0];
string $sourceFiles[0];
string $destFiles[0];
string $gatheredSourceFiles[0];
get_filename_attributes_from_scene( $attrNames, $cacheFileObjectNames, $yetiAttrNames );
// Only use if you want to filter out files that are already local
string $networkPrefixes[] = { "//", "/", "X:", "x:", "Y:", "y:", "Z:", "z:" };
get_network_assets( $attrNames, "basic", $networkPrefixes );
get_network_assets( $cacheFileObjectNames, "cacheFile", $networkPrefixes );
get_network_assets( $yetiAttrNames, "yeti", $networkPrefixes );
get_files_to_copy( "C:/temp", $attrNames, $cacheFileObjectNames, $yetiAttrNames, $sourceFiles, $destFiles );
gather_source_file_copies("C:/temp", $sourceFiles, $gatheredSourceFiles );
change_to_relative_directory( $gatheredSourceFiles, "C:/temp");
write_asset_introspection_file_transfer_pairs( "C:/temp/maya_asset_pairs.txt", $gatheredSourceFiles, $destFiles );
repath_assets( "C:/temp", $attrNames, $cacheFileObjectNames, $yetiAttrNames );
write_repathing_mel_script_file( "C:/temp/asset_repathing_script.mel", "C:/temp", $attrNames, $cacheFileObjectNames, $yetiAttrNames );
}
*/
///////////////////////////////INTERNAL FUNCTIONS////////////////////////////////////
//Return true when $v looks like an absolute Windows path (drive letter or UNC share).
proc int is_windows_path( string $v ) {
    //Starts with "//" (windows network)
    //Starts with "[driveletter]:/" (windows filesystem)
    //Add to this as needed.
    $v = fromNativePath( $v );
    if( size($v) < 4 ) //min size of four
        return false;
    $matchVal = `match ".*\n.*" $v`; //no newlines. important to add because of embedded mel in string attributes.
    if( $matchVal != "" )
        return false;
    if( startsWith( $v, "//" ) && !startsWith( $v, "///" ) ) //double slash starting. good.
        return true;
    $matchVal = `match "^[a-zA-Z]:/.*" $v`; //windows drive letter style. good. absolute paths only.
    if( $matchVal != "" )
        return true;
    return false;
}
//Return true when $v looks like an absolute POSIX path (single leading slash).
proc int is_posix_path( string $v ) {
    //Starts with "/" (linux filesystem)
    //Add to this as needed.
    $v = fromNativePath( $v );
    if( size($v) < 2 ) //min size of two
        return false;
    $matchVal = `match ".*\n.*" $v`; //no newlines. important to add because of embedded mel in string attributes.
    if( $matchVal != "" )
        return false;
    if( startsWith( $v, "/" ) && !startsWith( $v, "//" ) ) //must start with single slash. only absolute paths supported (can't have a double slash. that's windows)
        return true;
    return false;
}
//Convert forward slashes to backslashes, but only for Windows-style paths.
proc string to_native_path( string $v ) {
    //This is used to swap slashes on windows.
    //There is already a MEL script function called toNativePath, but swaps to the filename types of the current OS. We want to do it based on the actual style of path instead.
    //NOTE: MEL's "fromNativePath" ALWAYS converts everything to forward slashes, but "toNativePath" does it based on the current OS.
    if( is_windows_path( $v ) )
        while( $v != ( $v = `substitute "/" $v "\\"` ) ); //swap to backslashes
    return $v;
}
//Return true when $attrVal looks like an absolute path of either style (Windows or POSIX).
proc int looks_like_filepath( string $attrVal ) {
    //check all the cases for absolute filenames:
    if( is_windows_path( $attrVal ) )
        return true;
    if( is_posix_path( $attrVal ) )
        return true;
    return false;
}
//Return the filename stored in a string attribute, or "" if the value
//is empty or does not look like an absolute file path.
proc string get_asset_path( string $attrName ) {
    //Get a filename out of a string attribute
    //Normally this is just the value of the attribute.
    //HOWEVER there are special exceptions that we may need to handle here.
    //An example might be filenames that use "####" in place of the current frame number or something. An example is Krakatoa objects. However, this isn't handled yet.
    //Normal case, just use the string attribute itself. Most things will end up here.
    $attrVal = `getAttr $attrName`; //should return string.
    //Just exclude the empty string case right away
    if( $attrVal == "" )
        return "";
    //This swaps backslashes to forward slashes
    $attrVal = fromNativePath( $attrVal );
    if( looks_like_filepath( $attrVal ) )
        return $attrVal;
    return "";
}
//Return true when the given "node.attr" is a queryable string attribute.
proc int is_attribute_a_string( string $nodeAndAttrName ) {
    //for some reason, some attributes produce a "The value for the attribute could not be retrieved" when trying to query them. If that's the case, then just skip it. I don't think this'll be an issue.
    if( catchQuiet( `getAttr -type $nodeAndAttrName` ) ) {
        return false;
    }
    $attrType = `getAttr -type $nodeAndAttrName`;
    if( $attrType == "string" ) {
        return true;
    }
    return false;
}
//Probe the first element at each level of a multi-attribute chain and report
//whether the leaf attribute is of type string.
proc int is_multi_attribute_a_string( string $nodeName, string $attrNameComponents[] ) {
    //Test whether this multi-attribute holds strings
    //Without this test prior to recursion, we'll be recursively iterating through a lot of useless non-string attributes. For example, mesh vertices.
    //For example, recurse to the end of "tweak1.plist[0].controlPoints[0].xValue" and determine if "xValue" is a string attribute.
    $multiAttrName = $nodeName;
    for( $i=0; $i<size($attrNameComponents)-1; $i++ ) {
        $multiAttrName += "." + $attrNameComponents[$i];
        int $indexArray[] = `getAttr -multiIndices $multiAttrName`;
        if( size($indexArray) == 0 )
            return false; //this array of multi-attributes is empty. we don't need to continue.
        $multiAttrName += "[" + $indexArray[0] + "]"; //add the first index in this multi-attribute
    }
    $multiAttrName += "." + $attrNameComponents[ size($attrNameComponents)-1 ]; //add the actual non-multi-portion of the attribute to the end
    return is_attribute_a_string( $multiAttrName );
}
//Return whether the attribute is storable (saved with the file), by splitting
//"node.attr" and querying `attributeQuery -st` on the components.
proc int is_attribute_storable( string $attrName )
{
    string $attrComponents[] = stringToStringArray( $attrName, "." );
    int $attrArraySize = `size($attrComponents)`;
    //last component is the attribute itself; everything before it is the node path
    string $attrComponentAttr = $attrComponents[ $attrArraySize - 1 ];
    stringArrayRemoveAtIndex( $attrArraySize - 1, $attrComponents );
    string $attrComponentBase = stringArrayToString( $attrComponents, "." );
    return `attributeQuery -st -node $attrComponentBase $attrComponentAttr`;
}
//Expand a multi-attribute name into all of its concrete indexed attribute
//names, appending each fully-indexed leaf into $outAttrs.
proc get_multi_attrs_recursive( string $outAttrs[], string $attrNameBase, string $attrNameComponents[], int $level ) {
    //Examples:
    //file1.explicitUvTiles[0].explicitUvTileName
    //file1.explicitUvTiles[1].explicitUvTileName
    //tweak1.plist[0].controlPoints[0].xValue
    $attrName = $attrNameBase + "." + $attrNameComponents[$level];
    if( $level == size($attrNameComponents) - 1 ) {
        //reached the leaf component: record the fully-built attribute name
        $outAttrs[ size($outAttrs) ] = $attrName;
        return;
    }
    int $indexArray[] = `getAttr -multiIndices $attrName`;
    for( $i=0; $i<size($indexArray); $i++ ) {
        $attrNameWithIndex = $attrName + "[" + $indexArray[$i] + "]";
        get_multi_attrs_recursive( $outAttrs, $attrNameWithIndex, $attrNameComponents, ($level+1) );
    }
}
//Collect every string-typed, filename-flagged attribute on $nodeName,
//expanding multi-attributes into their concrete indexed names.
proc string[] get_all_string_attributes_from_node( string $nodeName ) {
    string $outAttrs[];
    $attrNameList = `listAttr -usedAsFilename $nodeName`;
    for( $i=0; $i<size($attrNameList); $i++ ) {
        $attrName = $attrNameList[$i];
        //See if this is a "multi-attribute" by splitting on "."
        //Example: $attrName = "explicitUvTiles.explicitUvTileName"
        //Example: $attrName = plist.controlPoints.xValue... This needs to be accessed by tweak1.plist[0].controlPoints[0].xValue
        $attrNameComponents = stringToStringArray( $attrName, "." );
        if( size($attrNameComponents) >= 2 ) {
            //multi-attribute case. must use recursion to get the attributes.
            if( is_multi_attribute_a_string( $nodeName, $attrNameComponents ) ) {
                get_multi_attrs_recursive( $outAttrs, $nodeName, $attrNameComponents, 0 ); //will append to $outAttrs
            }
        } else {
            //Normal (non-multi) attribute case
            //Example: "file1.fileTextureName"
            string $fullNodeAndAttrName = $nodeName + "." + $attrName;
            if( is_attribute_a_string( $fullNodeAndAttrName ) ) {
                $outAttrs[ size($outAttrs) ] = $fullNodeAndAttrName; //append to $outAttrs
            }
        }
    }
    return $outAttrs;
}
//Walk every node in the scene and gather its string attribute names,
//skipping cacheFile nodes which are handled by a dedicated special case.
proc string[] get_all_string_attributes_from_scene() {
    //The values in the outgoing array look like "nodeName.attrName" or "nodeName.multiAttrName[index].attrName", etc.
    string $allStringAttrs[0];
    $allSceneNodes = `ls`;
    for( $i=0; $i<size($allSceneNodes); $i++ ) {
        $nodeName = $allSceneNodes[$i];
        //Certain nodes and handled with special cases, and should be ignored here.
        //For example, "cacheFile" type objects use both a "Base Directory" and a "Base Name" attributes to construct a full filename.
        //So we handle those nodes separately later on.
        $nodeType = `objectType $nodeName`;
        if( $nodeType == "cacheFile" )
            continue;
        string $stringAttrs[] = `get_all_string_attributes_from_node $nodeName`;
        //append these to the outgoing array
        appendStringArray( $allStringAttrs, $stringAttrs, size($stringAttrs) );
    }
    return $allStringAttrs;
}
//Return true only when $filename is an existing regular file; warns (with the
//originating attribute name) when it is a directory or missing.
proc int check_if_file_exists( string $filename, string $nameOfSourceAttr ) {
    if( $filename == "" )
        return false;
    if( !`filetest -f $filename` ) {
        if( `filetest -d $filename` )
            print( "WARNING: Skipping potential introspection file. The following attribute appears to be a directory, and not a filename: " + $nameOfSourceAttr + " = '" + $filename + "'\n" );
        else
            print( "WARNING: Skipping potential introspection file. The following attribute looks like a filename, however the file does not exist: " + $nameOfSourceAttr + " = '" + $filename + "'\n" );
        return false;
    }
    return true;
}
//Fill $outAttrNames with every string attribute in the scene whose value is
//an existing file path (or a <udim> token pattern, which can't be file-tested).
proc get_all_filename_attributes_from_scene( string $outAttrNames[] ) {
    //This gives us all string attributes in the scene.
    //Both returned arrays are the same length. Together they make pairs.
    //The values in here look like "nodeName.attrName" or "nodeName.multiAttrName[index].attrName", etc.
    //Each one is queryable as-is and will return a string.
    string $stringAttrArray[] = `get_all_string_attributes_from_scene`;
    for( $i=0; $i<size($stringAttrArray); $i++ ) {
        $stringAttr = $stringAttrArray[$i];
        //Get the value out of this attribute.
        $filenameValue = get_asset_path( $stringAttr );
        //Test to see if this file actually exists.
        //The above code could have caught directories, etc. or just strings that look like filenames.
        // udim files won't be considered files since the token isn't evaluated
        if( `gmatch $filenameValue "*<udim>*"` || `gmatch $filenameValue "*<UDIM>*"` || check_if_file_exists( $filenameValue, $stringAttr ) ) {
            $outAttrNames[ size($outAttrNames) ] = $stringAttr; //append
        }
    }
}
//Append the NODE names (not attribute names) of all cacheFile objects to
//$outAttrNames; their paths are derived later from cachePath + cacheName.
proc get_all_filename_attributes_from_cacheFile_objects( string $outAttrNames[] ) {
    //Nodes of type cacheFiles are intentionally skipped over in the attribute introspection code.
    //That is because they present a special case where there are two files (.mcx, .xml) that are derived from a base directory and base name.
    //I'm not 100% sure if it's always a .mcx file that it needs, but in our current case, it is. There always appears to be a .xml file too.
    string $allCacheNodeNames[] = `ls -type "cacheFile"`;
    for( $i=0; $i<size($allCacheNodeNames); $i++ ) {
        $outAttrNames[ size( $outAttrNames ) ] = $allCacheNodeNames[$i];
    }
}
//Collect Yeti graph file references as synthetic "shape.graphNode.param"
//names: texture nodes' "file_name" and reference nodes' "reference_file".
proc get_all_filename_attributes_from_yeti_objects( string $outYetiAttrNames[] ) {
    //http://peregrinelabs-deploy.s3.amazonaws.com/Documentation/Yeti/1.3.16/scriptingref.html
    //Currently we're supporting Yeti's texture node's "file_name" attribute, and Yeti's reference node's "reference_file" attribute.
    //Maybe in the future this could be expanded to be more general if needed.
    string $yetiShapesArray[] = `ls -type "pgYetiMaya"`;
    string $yetiShapesArrayA[] = `ls -type "pgYetiMayaFeather"`; //I really think only pgYetiMaya nodes can have a Yeti graph. But I don't know. Maybe these other two types can too. I have them in here just for safe measure.
    string $yetiShapesArrayB[] = `ls -type "pgYetiGroom"`;
    appendStringArray( $yetiShapesArray, $yetiShapesArrayA, size($yetiShapesArrayA) );
    appendStringArray( $yetiShapesArray, $yetiShapesArrayB, size($yetiShapesArrayB) );
    for( $i=0; $i<size($yetiShapesArray); $i++ ) {
        $yetiShape = $yetiShapesArray[$i];
        string $yetiTextureNodeList[0];
        //if pgYetiGraph fails, the plugin isn't loaded; nothing can be repathed
        if( catch( $yetiTextureNodeList = `pgYetiGraph -listNodes -type "texture" $yetiShape` ) ) {
            print( "WARNING: There are Yeti nodes in the scene, but the Yeti plugin does not appear to be loaded. Texture files within the Yeti nodes will not be repathed.\n" );
            return;
        }
        for( $j=0; $j<size($yetiTextureNodeList); $j++ ) {
            $yetiTextureNode = $yetiTextureNodeList[$j];
            $attrVal = `pgYetiGraph -node $yetiTextureNode -param "file_name" -getParamValue $yetiShape`;
            $attrName = $yetiShape + "." + $yetiTextureNode + ".file_name"; //Make it a period separted attribute name. This is just for convenience. Later on in the code we will split this out into 3 parts again.
            if( check_if_file_exists( $attrVal, $attrName ) ) {
                $outYetiAttrNames[ size($outYetiAttrNames) ] = $attrName;
            }
        }
        $yetiReferenceNodeList = `pgYetiGraph -listNodes -type "reference" $yetiShape`;
        for( $j=0; $j<size($yetiReferenceNodeList); $j++ ) {
            $yetiReferenceNode = $yetiReferenceNodeList[$j];
            $attrVal = `pgYetiGraph -node $yetiReferenceNode -param "reference_file" -getParamValue $yetiShape`;
            $attrName = $yetiShape + "." + $yetiReferenceNode + ".reference_file"; //Make it a period separted attribute name. This is just for convenience. Later on in the code we will split this out into 3 parts again.
            if( check_if_file_exists( $attrVal, $attrName ) ) {
                $outYetiAttrNames[ size($outYetiAttrNames) ] = $attrName;
            }
        }
    }
}
//Map an absolute source path under $repathRoot, flattening Windows drive
//letters and UNC prefixes; returns "" when $repathRoot is not absolute.
proc string asset_repathing_translation( string $sourcePath, string $repathRoot ) {
    //Here is the rules this function uses. Dest paths can be Windows or Posix. The input path always satisfies one of these three:
    // 1) C:/win/path/myfile.ext > /repath_root/C/win/path/myfile.ext
    // 2) //netdrive/win/path/myfile.ext > /repath_root/netdrive/win/path/myfile.ext
    // 3) /posix/path/myfile.ext > /repath_root/posix/path/myfile.ext
    //Potential trouble:
    //Some characters that are valid in Windows may not be valid in Posix paths. I'm just assuming here that all the characters are allowable in both systems.
    //Maybe need to do some additional mapping here to make it work.
    if( !looks_like_filepath( $repathRoot ) ) {
        print( "ERROR: Could not repath to here because it is not a valid absolute path: '" + $repathRoot + "'\n" );
        return "";
    }
    $repathRoot = fromNativePath( $repathRoot );
    if( substring( $repathRoot, size($repathRoot), size($repathRoot) ) == "/" ) //strip trailing slash
        $repathRoot = substring( $repathRoot, 1, size($repathRoot)-1 );
    $outPath = "";
    if( is_windows_path( $sourcePath ) ) {
        $winPath = "";
        if( startsWith( $sourcePath, "//" ) ) {
            $winPath = substring( $sourcePath, 3, size($sourcePath) ); //strip double-slash
        } else {
            //remove the ":", so it looks like "/repath_root/C/previous_path/filename.ext"
            $winPath = toupper( substring( $sourcePath, 1, 1 ) ) + substring( $sourcePath, 3, size($sourcePath) );
        }
        $outPath = $repathRoot + "/" + $winPath;
    } else { //is_posix_path() will return true here, because was checked earlier to be one or the other.
        $outPath = $repathRoot + $sourcePath;
    }
    return $outPath;
}
//Return true when $path does not start with any of the given network prefixes.
proc int is_local_file( string $path, string $networkPrefixes[] ) {
    int $isLocal = true;
    for( $i=0; $i<size($networkPrefixes); $i++ ) {
        if( startsWith( $path, $networkPrefixes[$i] ) ) {
            $isLocal = false;
            break;
        }
    }
    return $isLocal;
}
//Build the pair of files a cacheFile node refers to (.mcx and .xml) from its
//cachePath and cacheName attributes.
proc string[] get_cacheFile_paths( string $nodeName ) {
    string $cachePathAttr = $nodeName + ".cachePath";
    string $baseNameAttr = $nodeName + ".cacheName";
    string $baseName = `getAttr $baseNameAttr`;
    string $cachePath = `getAttr $cachePathAttr`;
    $cachePath = fromNativePath( $cachePath );
    //combine path and base name. make sure there's a "/" separator
    string $lastChar = `substring $cachePath (size($cachePath)) (size($cachePath))`;
    if( $lastChar != "/" )
        $cachePath = $cachePath + "/";
    $fullPathWithoutExt = $cachePath + $baseName;
    $mcxFilename = $fullPathWithoutExt + ".mcx";
    $xmlFilename = $fullPathWithoutExt + ".xml";
    return {$mcxFilename, $xmlFilename};
}
//Return the cacheFile node's base directory (cachePath attribute), with
//forward slashes and a guaranteed trailing "/".
proc string get_cacheFile_directory( string $nodeName ) {
    string $cachePathAttr = $nodeName + ".cachePath";
    string $cachePath = `getAttr $cachePathAttr`;
    $cachePath = fromNativePath( $cachePath );
    //combine path and base name. make sure there's a "/" separator
    string $lastChar = `substring $cachePath (size($cachePath)) (size($cachePath))`;
    if( $lastChar != "/" )
        $cachePath = $cachePath + "/";
    return $cachePath;
}
//Resolve a synthetic "shape.graphNode.param" Yeti name back into the graph
//parameter's current value via pgYetiGraph.
proc string get_yeti_path( string $nodeName ) {
    string $yetiValues[];
    $yetiValues = stringToStringArray( $nodeName, "." ); // 3 period separated values
    return `pgYetiGraph -node $yetiValues[1] -param $yetiValues[2] -getParamValue $yetiValues[0]`;
}
//Translate every collected asset path (plain attrs, cacheFile dirs, yeti
//params) into its repathed location under $repathRoot; fills the out arrays.
proc translate_all_asset_files( string $repathRoot, string $attrNames[], string $cacheFileObjectNames[], string $yetiAttrNames[], string $outAttrFiles[], string $outCacheFiles[], string $outYetiFiles[] ) {
    clear($outAttrFiles);
    clear($outCacheFiles);
    clear($outYetiFiles);
    // translate attribute paths
    for( $i=0; $i<size($attrNames); $i++ ) {
        $outAttrFiles[$i] = asset_repathing_translation( get_asset_path( $attrNames[$i] ), $repathRoot );
    }
    // translate cacheFile paths
    for( $i=0; $i<size($cacheFileObjectNames); $i++ ) {
        $outCacheFiles[$i] = asset_repathing_translation( get_cacheFile_directory( $cacheFileObjectNames[$i] ), $repathRoot );
    }
    // translate yeti paths
    for( $i=0; $i<size($yetiAttrNames); $i++ ) {
        $outYetiFiles[$i] = asset_repathing_translation( get_yeti_path( $yetiAttrNames[$i] ), $repathRoot );
    }
}
//Return true when the manifest's recorded mtimes no longer match the actual
//source/dest file mtimes (i.e. the cached copy is stale).
proc int manifest_file_dirty( string $manifestPath, string $sourceFile, string $destFile )
{
    // given an existing manifest path and two existing asset paths (cache and network), compare the timestamps of the manifest and the files.
    // the first line of the manifest file will be the cacheFile's timestamp, the second is the networkFile's timestamp
    python( "import os" );
    string $sourceModifiedtime = python( "os.path.getmtime('" + $sourceFile + "')" );
    string $destModifiedtime = "";
    if( catch( $destModifiedtime = python( "os.path.getmtime('" + $destFile + "')" ) ) )
    {
        print( "Failed to get modified time from file: " + $destFile );
        return true;
    }
    $fileId = `fopen $manifestPath "r"`;
    $sourceManifestTime = `fgetline $fileId`;
    $destManifestTime = `fgetline $fileId`;
    fclose $fileId;
    //strip trailing newlines before comparing against the python mtime strings
    $sourceManifestTime = substituteAllString($sourceManifestTime, "\n", "");
    $destManifestTime = substituteAllString($destManifestTime, "\n", "");
    return !( ($sourceModifiedtime == $sourceManifestTime) && ($destModifiedtime == $destManifestTime ) );
}
// Walk over manifest files in a directory and delete the old assets
//Each manifest's last line holds a unix timestamp; assets older than
//$deleteTimer days (together with their manifest) are removed.
proc inspect_manifests( string $manifestFiles[], int $deleteTimer )
{
    python( "import time" );
    int $manifestSize_CONST = 9; // ".manifest" has 9 characters
    int $deleteTimerInSec = $deleteTimer * 24 * 60 * 60;
    for( $manifest in $manifestFiles )
    {
        $fileId = `fopen $manifest "r"`;
        string $filedata = `fread $fileId $filedata`;
        string $fileLines[];
        tokenize $filedata "\n" $fileLines;
        //last line of the manifest is the timestamp of the cached copy
        int $manifestTime = int( $fileLines[size($fileLines)-1] );
        fclose $fileId;
        int $currTime = python( "int(time.time())" );
        //print( "elapsed=" + ( $currTime - $manifestTime ) + " required=" + $deleteTimerInSec + "\n" );
        if( $currTime - $manifestTime > $deleteTimerInSec )
        {
            //asset filename = manifest filename minus the ".manifest" suffix
            string $assetName = `substring $manifest 1 (size($manifest)-$manifestSize_CONST)`;
            print( "Deleting asset file " + $assetName + " because it is more than " + $deleteTimer + " days old\n" );
            sysFile -delete $assetName;
            sysFile -delete $manifest;
        }
    }
}
//Recursively scan $cacheDir for *.manifest files and purge cached assets
//older than $deleteTimer days via inspect_manifests.
proc delete_assets( string $cacheDir, int $deleteTimer ) {
    string $manifests[] = `getFileList -folder $cacheDir -fs "*.manifest"`;
    for( $i = 0; $i < size($manifests); $i++ )
    {
        $manifests[$i] = $cacheDir + "/" + $manifests[$i];
    }
    //recurse into subfolders first
    string $folders[] = `getFileList -folder $cacheDir`;
    for( $folder in $folders )
    {
        string $currFolder = $cacheDir + "/" + $folder;
        delete_assets( $currFolder, $deleteTimer );
    }
    inspect_manifests( $manifests, $deleteTimer );
}
////////////////////// EXTERNAL FUNCTIONS TO BE USED FOR HOT-FOLDER SYNCHING AND REPATH, AND LOCAL ASSET CACHING //////////////////
// Using $networkPrefixes we filter out all assets that don't have a network prefix so that we only mess with network files
// Filters $outAttrNames IN PLACE: entries whose resolved file path is local
// (does not match any prefix in $networkPrefixes) are removed.  $type selects
// how the path is resolved from each entry: "cacheFile", "yeti", or a basic
// string attribute.
global proc get_network_assets( string $outAttrNames[], string $type, string $networkPrefixes[] ) {
string $filenameValue;
// iterate backwards so removing by index stays valid
for( $i=size($outAttrNames)-1; $i>=0; $i-- ) {
if( $type == "cacheFile" ) {
string $filenames[] = get_cacheFile_paths( $outAttrNames[$i] );
// Only need to check 1 path, as they have the same folder
$filenameValue = $filenames[0];
} else if( $type == "yeti" ) {
$filenameValue = get_yeti_path( $outAttrNames[$i] );
} else { // basic
$filenameValue = get_asset_path( $outAttrNames[$i] );
}
if( is_local_file( $filenameValue, $networkPrefixes ) ) {
stringArrayRemoveAtIndex( $i, $outAttrNames );
}
}
}
// Populates the three output arrays with every scene attribute/node that
// references a file on disk: generic string attributes, cacheFile nodes
// (which combine two attributes into a path) and Yeti graph parameters
// (only when the pgYetiMaya plug-in is loaded).
global proc get_filename_attributes_from_scene( string $outAttrNames[], string $outCacheFileObjectNames[], string $outYetiAttrNames[] ) {
clear( $outAttrNames );
clear( $outCacheFileObjectNames );
clear( $outYetiAttrNames );
//First get name/value pairs from string attributes in the scene
//This will be the majority of all assets. Special case attributes to follow.
get_all_filename_attributes_from_scene( $outAttrNames );
//Special case alert: Nodes of type "cacheFile" need to be special caseed because they combine two attributes to make a path.
get_all_filename_attributes_from_cacheFile_objects( $outCacheFileObjectNames );
//Special case the Yeti nodes because you have to get attributes differently.
if( `pluginInfo -query -loaded "pgYetiMaya"` ){
get_all_filename_attributes_from_yeti_objects( $outYetiAttrNames );
}
}
// Resolves the attribute/node lists into a de-duplicated flat list of source
// file paths in $outSourceFiles.  For file texture nodes with <udim>/frame
// tokens, the computed pattern is expanded to every matching file on disk via
// Maya's fileTexturePathResolver.
global proc get_source_file_paths_from_attr( string $outAttrNames[], string $outCacheFileObjectNames[], string $outYetiAttrNames[], string $outSourceFiles[] ){
python( "import maya.app.general.fileTexturePathResolver" );
// Build list of files
for( $i=0; $i<size($outAttrNames); $i++ ) {
$sourceFile = get_asset_path( $outAttrNames[$i] );
// check for tokens in file pattern
string $computedFileAttr = `substitute "fileTextureName" $outAttrNames[$i] "computedFileTextureNamePattern"`;
if (`objExists $computedFileAttr`) {
string $seq_pattern = `getAttr($computedFileAttr)`;
string $filePaths[] = python( "maya.app.general.fileTexturePathResolver.findAllFilesForPattern( '" + $seq_pattern + "', None )" );
for( $j=0; $j<size($filePaths); $j++) {
$outSourceFiles[ size( $outSourceFiles ) ] = $filePaths[$j];
}
} else {
$outSourceFiles[ size( $outSourceFiles ) ] = $sourceFile;
}
}
// cacheFile nodes contribute two paths each (appendStringArray copies 2)
for( $i=0; $i<size($outCacheFileObjectNames); $i++ ) {
appendStringArray( $outSourceFiles, get_cacheFile_paths( $outCacheFileObjectNames[$i] ), 2 );
}
for( $i=0; $i<size($outYetiAttrNames); $i++ ) {
$outSourceFiles[ size( $outSourceFiles ) ] = get_yeti_path( $outYetiAttrNames[$i] );
}
$outSourceFiles = stringArrayRemoveDuplicates( $outSourceFiles );
}
// Collect every source asset path referenced by the given attribute/node
// lists, then compute the destination each one maps to under $repathRoot.
// $outSourceFiles and $outDestFiles are filled index-for-index.
global proc get_files_to_copy( string $repathRoot, string $outAttrNames[], string $outCacheFileObjectNames[], string $outYetiAttrNames[], string $outSourceFiles[], string $outDestFiles[] ){
get_source_file_paths_from_attr( $outAttrNames, $outCacheFileObjectNames, $outYetiAttrNames, $outSourceFiles );
// Translate each gathered source path into its repathed destination.
int $idx = 0;
int $total = size( $outSourceFiles );
while( $idx < $total ) {
$outDestFiles[$idx] = asset_repathing_translation( $outSourceFiles[$idx], $repathRoot );
$idx++;
}
}
// For every source file, looks in the same directory for Redshift sidecar
// cache files matching "<basename>rs*bin" and appends their full paths to
// $outRedshiftCacheFiles.
global proc get_redshift_cache_files( string $sourceFiles[], string $outRedshiftCacheFiles[] ) {
for( $sourceFile in $sourceFiles ){
string $sourceDirectory = dirname( $sourceFile );
string $oldExtension = fileExtension( $sourceFile );
string $filename = basename( $sourceFile, $oldExtension );
string $cacheFiles[] = `getFileList -folder $sourceDirectory -filespec ($filename+"rs*bin")`;
for( $cacheFile in $cacheFiles )
{
$outRedshiftCacheFiles[ size( $outRedshiftCacheFiles ) ] = ( $sourceDirectory +"/"+ $cacheFile );
}
}
}
// Writes one "source>dest" line per transferred asset to the given file
// (native path separators), for consumption by the submitter/introspection
// step.  Writes nothing when there are no source files.
global proc write_asset_introspection_file_transfer_pairs( string $outputSourceDestPairsFilename, string $sourceFiles[], string $destFiles[] ) {
print( "ASSETS TRANSFERED: " + size( $sourceFiles ) + "\n" );
if( size( $sourceFiles ) > 0 ) {
$outputSourceDestPairsFilename = fromNativePath( $outputSourceDestPairsFilename ); //Sanitize
$fileId = `fopen $outputSourceDestPairsFilename "w"`;
for( $i=0; $i<size($sourceFiles); $i++ ) {
//swap to windows-style backslashes for the output of all this. currently they're all forward slashes.
$source = $sourceFiles[$i];
$dest = $destFiles[$i];
$source = to_native_path( $source ); //swap to backslashes if necessary
$dest = to_native_path( $dest );
$fileLineStr = ( $source + ">" + $dest + "\n" );
fprint $fileId $fileLineStr;
print( $fileLineStr ); //For debug
}
fclose $fileId;
}
}
// Copies every file in $inOrigSourceFiles into the local cache rooted at
// $gatherDirectoryRoot, skipping files whose .manifest says the cached copy
// is still current.  $outNewSourceFiles receives, per input index, the
// native path of the cached copy (or the original path when copying failed).
// Manifest format: source mtime \n dest mtime \n last-accessed unix time.
global proc gather_source_file_copies( string $gatherDirectoryRoot, string $inOrigSourceFiles[], string $outNewSourceFiles[] ) {
//NOTE: On error (couldn't copy for some reason), an error message is printed, and the original path to the file is returned.
//We might need to change this behaviour to handle errors better/differently.
//Sanitize inputs
clear( $outNewSourceFiles );
$gatherDirectoryRoot = fromNativePath( $gatherDirectoryRoot );
if( substring( $gatherDirectoryRoot, size($gatherDirectoryRoot), size($gatherDirectoryRoot) ) != "/" ) //add trailing slash
$gatherDirectoryRoot += "/";
for( $i=0; $i<size($inOrigSourceFiles); $i++ ) {
$sourceFile = fromNativePath( $inOrigSourceFiles[$i] );
$destFile = "";
$destFile = asset_repathing_translation( $sourceFile, $gatherDirectoryRoot );
string $destDirectory = python( "import os; os.path.dirname('" + $destFile + "')" ); //get directory part
$dirSuccess = true;
if( !`filetest -d $destDirectory` ) {
$pythonScriptMakeDir = "import os; os.makedirs('" + $destDirectory + "')"; //recursive dir creation
if( catch( python( $pythonScriptMakeDir ) ) ) {
// FIX: report the directory we failed to create (was printing the file path)
print( "ERROR: Could not create the gathering destination directory: '" + $destDirectory + "'\n" );
$destFile = $sourceFile;
$dirSuccess = false;
}
}
if( $dirSuccess ) { // manifest is of the form src \n dest ( network \n cache )
$doCopy = false;
$manifestPath = $destFile + ".manifest";
// if manifest non existant or manifest dirty
// something is wrong with checking the status of the file
if( !(`file -q -exists $manifestPath`) || manifest_file_dirty( $manifestPath, $sourceFile, $destFile) ) {
print("Manifest file outdated... Updating asset cache.\n");
$doCopy = true;
}
else { // manifest file exists and is clean
print("Manifest file is up-to-date... Rewriting last accessed time.\n");
// write back manifest data with updated access time
python( "import time" );
// grab the first two lines to write them back to the file
$fileId = `fopen $manifestPath "r"`;
$sourceManifestTime = `fgetline $fileId`;
$destManifestTime = `fgetline $fileId`;
fclose $fileId;
$fileId = `fopen $manifestPath "w"`;
string $currTime = python( "int(time.time())" );
fprint $fileId $sourceManifestTime;
fprint $fileId $destManifestTime;
fprint $fileId $currTime;
fclose $fileId;
}
if( $doCopy ) {
$copySuccess = `sysFile -copy $destFile $sourceFile`;
if( !$copySuccess ) {
print( "ERROR: Could not copy file from '" + $sourceFile + "' to '" + $destFile + "'\n" );
$destFile = $sourceFile;
} else {
print( "File copied from '" + $sourceFile + "' to '" + $destFile + "'\n" );
// (over)write manifest file. sourcetime \n desttime
$fileId = `fopen $manifestPath "w"`;
python( "import os, time" );
string $newSourceManifest = python("os.path.getmtime('" + $sourceFile + "')");
string $newDestManifest = python("os.path.getmtime('" + $destFile + "')");
string $currTime = python( "int(time.time())" );
fprint $fileId ($newSourceManifest+"\n");
fprint $fileId ($newDestManifest+"\n");
fprint $fileId $currTime;
// set line 3 to current time
fclose $fileId;
}
}
}
$outNewSourceFiles[$i] = to_native_path( $destFile );
}
}
global proc change_to_relative_directory( string $fileArray[], string $rootDir ) {
//Convenience function. Turns absolute paths into relative paths based on the root directory.
// Modifies $fileArray in place; entries not under $rootDir are left alone.
// Windows-style inputs get their separators converted back to backslashes.
$rootDir = fromNativePath( $rootDir );
if( substring( $rootDir, size($rootDir), size($rootDir) ) != "/" ) //add trailing slash
$rootDir += "/";
for( $i=0; $i<size($fileArray); $i++ ) {
$file = fromNativePath( $fileArray[$i] );
if( startsWith( $file, $rootDir ) ) {
// strip the root prefix, keeping only the relative remainder
$newFile = substring( $file, size($rootDir)+1, size($file) );
if( is_windows_path( $file ) ) {
while( $newFile != ( $newFile = `substitute "/" $newFile "\\"` ) ); //swap to backslashes
}
$fileArray[$i] = $newFile;
}
}
}
// Writes a standalone MEL script of catch-wrapped setAttr / pgYetiGraph
// commands that repath every asset to its translated location under
// $repathRoot.  A path is only written when the translated file exists on
// disk (or contains a <udim> token that can't be tested directly).
global proc write_repathing_mel_script_file( string $outputMelScriptRepathingFilename, string $repathRoot, string $attrNames[], string $cacheFileObjectNames[], string $yetiAttrNames[] ) {
string $translatedAttrFiles[];
string $translatedCacheFiles[];
string $translatedYetiFiles[];
translate_all_asset_files( $repathRoot, $attrNames, $cacheFileObjectNames, $yetiAttrNames, $translatedAttrFiles, $translatedCacheFiles, $translatedYetiFiles );
$fileId = `fopen $outputMelScriptRepathingFilename "w"`;
// repath attribute paths
for( $i=0; $i<size($attrNames); $i++ ) {
if( is_attribute_storable( $attrNames[$i] ) ) {
if( `gmatch $translatedAttrFiles[$i] "*<udim>*"` || `gmatch $translatedAttrFiles[$i] "*<UDIM>*"`|| `filetest -f $translatedAttrFiles[$i]` ) {
$fileLineStr = "catch( `setAttr -type \"string\" \"" + $attrNames[$i] + "\" \"" + $translatedAttrFiles[$i] + "\"` );\n";
fprint $fileId $fileLineStr;
}
}
}
// repath cacheFile directories
for( $i=0; $i<size($cacheFileObjectNames); $i++ ) {
string $cachPathAttr = $cacheFileObjectNames[$i] + ".cachePath";
if(`attributeQuery -st -node $cacheFileObjectNames[$i] "cachePath"` ) {
if( `filetest -f $translatedCacheFiles[$i]` ) {
$fileLineStr = "catch( `setAttr -type \"string\" \"" + $cachPathAttr + "\" \"" + $translatedCacheFiles[$i] + "\"` );\n";
fprint $fileId $fileLineStr;
}
}
}
// repath yeti
for( $i=0; $i<size($yetiAttrNames); $i++ ) {
string $attrComponents[];
$attrComponents = stringToStringArray( $yetiAttrNames[$i], "." ); //Always size 3 because we made these in the above code.
$attrValue = $translatedYetiFiles[$i];
$shape = $attrComponents[0];
$node = $attrComponents[1];
$attr = $attrComponents[2];
if( `filetest -f $attrValue` ) {
$fileLineStr = "catch( `pgYetiGraph -node \"" + $node + "\" -param \"" + $attr + "\" -setParamValueString \"" + $attrValue + "\" \"" + $shape + "\"` );\n";
fprint $fileId $fileLineStr;
}
}
fclose $fileId;
}
// Applies asset repathing directly to the open scene: same translation and
// existence checks as write_repathing_mel_script_file, but executed
// immediately (setAttr / pgYetiGraph wrapped in catch) instead of written
// to a script file.
global proc repath_assets( string $repathRoot, string $attrNames[], string $cacheFileObjectNames[], string $yetiAttrNames[] ) {
string $translatedAttrFiles[];
string $translatedCacheFiles[];
string $translatedYetiFiles[];
translate_all_asset_files( $repathRoot, $attrNames, $cacheFileObjectNames, $yetiAttrNames, $translatedAttrFiles, $translatedCacheFiles, $translatedYetiFiles );
// repath attribute paths
for( $i=0; $i<size($attrNames); $i++ ) {
if( is_attribute_storable( $attrNames[$i] ) ){
if( `gmatch $translatedAttrFiles[$i] "*<udim>*"` || `gmatch $translatedAttrFiles[$i] "*<UDIM>*"`|| `filetest -f $translatedAttrFiles[$i]` ) {
catch( `setAttr -type "string" $attrNames[$i] $translatedAttrFiles[$i]` );
}
}
}
// repath cacheFile directories
for( $i=0; $i<size($cacheFileObjectNames); $i++ ) {
string $cachPathAttr = $cacheFileObjectNames[$i] + ".cachePath";
if(`attributeQuery -st -node $cacheFileObjectNames[$i] "cachePath"` ) {
if( `filetest -f $translatedCacheFiles[$i]` ) {
catch( `setAttr -type "string" $cachPathAttr $translatedCacheFiles[$i]` );
}
}
}
// repath yeti
for( $i=0; $i<size($yetiAttrNames); $i++ ) {
string $attrComponents[];
$attrComponents = stringToStringArray( $yetiAttrNames[$i], "." ); //Always size 3 because we made these in the above code.
$attrValue = $translatedYetiFiles[$i];
$shape = $attrComponents[0];
$node = $attrComponents[1];
$attr = $attrComponents[2];
if( `filetest -f $attrValue` ) {
catch( `pgYetiGraph -node $node -param $attr -setParamValueString $attrValue $shape` );
}
}
}
// Entry point for local-asset-cache cleanup.  $cacheDir may be a ">"-separated
// pair; only the first segment is the on-disk cache root to purge.
global proc delete_old_assets( string $cacheDir, int $deleteTimer )
{
string $segments[];
tokenize $cacheDir ">" $segments;
string $cacheRoot = $segments[0];
delete_assets( $cacheRoot, $deleteTimer );
}
// Attributes whose values look like file paths but must not be treated as
// input assets (they are render OUTPUT locations).
string $dl_ignorable_asset_attributes[] = {"rmanGlobals.imageOutputDir","rmanGlobals.ribOutputDir"};
// True when $attr is on the ignore list above.
proc int is_ignorable_attribute( string $attr )
{
global string $dl_ignorable_asset_attributes[];
return `stringArrayContains $attr $dl_ignorable_asset_attributes`;
}
// Returns every asset file path Maya's File Path Editor knows about, skipping
// attributes on the ignore list.  -withAttribute returns interleaved pairs:
// even index = file name, odd index = attribute that references it.
global proc string[] find_asset_paths()
{
filePathEditor -refresh;
string $file_paths[];
string $dirs[] = `filePathEditor -q -listDirectories ""`;
for( $dir in $dirs )
{
string $files[] = `filePathEditor -q -withAttribute -listFiles $dir`;
int $i;
for( $i = 0; $i<size($files); $i+=2)
{
if( !`is_ignorable_attribute $files[$i+1]` )
{
string $file_path = $dir + "/" + $files[$i];
print("Found Asset: " + $files[$i+1] + " - " + $file_path + "\n" );
$file_paths[ size($file_paths) ] = $dir + "/" + $files[$i]; //append
}
}
}
return $file_paths;
}
// Top-level entry for slave-side Local Asset Caching (LAC):
//  1. find all file-referencing attributes in the scene,
//  2. keep only those on a network location (per $semiColonSeparatedNetworkPrefixes),
//  3. copy them (manifest-aware) under $localAssetCacheDirectory,
//  4. repath the scene to the cached copies,
//  5. purge cached assets older than $slaveLACDaysToDelete days.
// NOTE(review): $slaveLACDaysToDelete arrives as a string and relies on MEL's
// implicit string->int conversion at the delete_old_assets call — confirm.
global proc do_local_asset_caching( string $localAssetCacheDirectory, string $semiColonSeparatedNetworkPrefixes, string $slaveLACDaysToDelete )
{
string $attrNames[0];
string $cacheFileObjectNames[0];
string $yetiAttrNames[0];
string $sourceFiles[0];
string $redshiftCacheFiles[0];
string $destFiles[0];
string $gatheredSourceFiles[0];
string $gatheredRedshiftSourceFiles[0];
//identify all attributes that contain filenames
get_filename_attributes_from_scene( $attrNames, $cacheFileObjectNames, $yetiAttrNames );
//of the identified files above, filter out the ones that aren't in a "network" location.
string $networkPrefixes[] = stringToStringArray( $semiColonSeparatedNetworkPrefixes, ";" );
get_network_assets( $attrNames, "basic", $networkPrefixes );
get_network_assets( $cacheFileObjectNames, "cacheFile", $networkPrefixes );
get_network_assets( $yetiAttrNames, "yeti", $networkPrefixes );
//copy the asset files locally (if needed, based on their .manifest files)
get_files_to_copy( $localAssetCacheDirectory, $attrNames, $cacheFileObjectNames, $yetiAttrNames, $sourceFiles, $destFiles );
gather_source_file_copies( $localAssetCacheDirectory, $sourceFiles, $gatheredSourceFiles );
if( currentRenderer() == "redshift")
{
// Redshift keeps sidecar "*.rs*bin" caches next to sources; gather those too.
get_redshift_cache_files( $sourceFiles, $redshiftCacheFiles );
gather_source_file_copies( $localAssetCacheDirectory, $redshiftCacheFiles, $gatheredSourceFiles );
}
//change the asset paths in the scene to point at the ones in the cache folder
repath_assets( $localAssetCacheDirectory, $attrNames, $cacheFileObjectNames, $yetiAttrNames );
//does a cleanup pass through the local asset cahce directory, checks their manifests, and deletes them if they're too old.
delete_old_assets( $localAssetCacheDirectory, $slaveLACDaysToDelete );
}

View file

@ -0,0 +1,67 @@
// adrian.graham@autodesk.com
// 01/24/14
//
// Spit out Bifrost-related data. Useful in the "Pre-Render Frame MEL" field of
// Render Options, so batch processing (playblasting, exporting meshes, etc)
// can be closely monitored.
//
// Usage: simply put 'BifrostMemUsage' (matching the proc name below) in the
// Pre-Render Frame MEL field under Render Globals > Render Options.
//
// Also handy to put in a per-frame expression.
// Time of the previous evaluation (unix seconds); 0.0 before the first call.
global float $ELAPSED;
// Prints Bifrost memory/particle statistics for the current frame plus the
// wall-clock time elapsed since the previous evaluation.
global proc BifrostMemUsage() {
// Initialize python.
global float $ELAPSED;
python( "import time" );
// NOTE(review): on the very first call $ELAPSED is 0, so this prints the
// whole unix epoch time as "elapsed" — confirm that is acceptable.
float $time = python( "'%.02f' % (time.time()-float("+$ELAPSED+"))" );
print( "[bifrostMemUsage] ELAPSED since last eval: "+$time+" seconds.\n" );
$ELAPSED = python( "time.time()" );
// Get data.
string $date_time = `date`;
float $frame = `currentTime -q`;
string $free = `memory -mb -freeMemory`;
string $used = `memory -mb -heapMemory`;
// Make Mb amounts nicer.
python( "import locale" );
python( "locale.setlocale(locale.LC_ALL, '')" );
// NOTE(review): format(...).decode(...) is Python-2-only; this would raise
// under a Python-3 Maya — verify the target Maya versions.
$free = python( "format( "+int($free)+", 'n' ).decode(locale.getpreferredencoding())" );
$used = python( "format( "+int($used)+", 'n' ).decode(locale.getpreferredencoding())" );
string $mem_output = "[bifrostMemUsage] FRAME "+$frame+" ("+$date_time+")\n";
$mem_output += "[bifrostMemUsage] USED: "+$used+" Mb\n";
$mem_output += "[bifrostMemUsage] FREE: "+$free+" Mb\n";
string $containers[] = `ls -type bifrostContainer`;
for( $container in $containers ) {
$mem_output += " >>> "+$container+" <<<\n";
if( `objExists ($container+".masterVoxelSize")` ) {
float $mvs = `getAttr ($container+".masterVoxelSize")`;
$mem_output += " MVS: "+$mvs+"\n" ;
} // end if
// Get associated Bifrost object.
string $bifrost_objs[] = `listConnections -shapes on -type bifrostShape $container`;
string $bifrost_obj = $bifrost_objs[0];
string $num_parts = `getAttr( $bifrost_obj+".outNumParticles")`;
string $num_voxels = `getAttr( $bifrost_obj+".outNumVoxels")`;
$num_parts = python( "format( "+int($num_parts)+", 'n' ).decode(locale.getpreferredencoding())" );
$num_voxels = python( "format( "+int($num_voxels)+", 'n' ).decode(locale.getpreferredencoding())" );
$mem_output += " Particles: "+$num_parts+"\n" ;
$mem_output += " Voxels: "+$num_voxels+"\n" ;
}
$mem_output += "\n";
// Finally, print out.
print( $mem_output );
} // end bifrostMemUsage

View file

@ -0,0 +1,239 @@
from __future__ import print_function
import json
import os
import re
import subprocess
import maya.cmds
import maya.mel
# The version that Redshift fixed the render layer render setup override locking issue
# Prior versions will need to use the workaround in the unlockRenderSetupOverrides function
REDSHIFT_RENDER_SETUP_FIX_VERSION = (2, 5, 64)
def getCurrentRenderLayer():
    """Return the name of the render layer Maya currently has active."""
    current_layer = maya.cmds.editRenderLayerGlobals( query=True, currentRenderLayer=True )
    return current_layer
# Mimics the built-in MEL function 'renderLayerDisplayName', using the real
# helper when it exists and replicating its behaviour otherwise.
def getRenderLayerDisplayName( layer_name ):
    """Return the user-facing display name for *layer_name*."""
    if maya.mel.eval( 'exists renderLayerDisplayName' ):
        return maya.mel.eval( 'renderLayerDisplayName ' + layer_name )
    # Fallback path: the helper is unavailable, do it ourselves.
    if layer_name == 'masterLayer':
        return layer_name
    is_legacy_layer = maya.cmds.objExists( layer_name ) and maya.cmds.nodeType( layer_name ) == 'renderLayer'
    if is_legacy_layer:
        # Display name for default render layer
        if maya.cmds.getAttr( layer_name + '.identification' ) == 0:
            return 'masterLayer'
        # If Render Setup is used the corresponding Render Setup layer name should
        # be used instead of the legacy render layer name.
        connections = maya.cmds.listConnections( layer_name + '.msg', type='renderSetupLayer' )
        if connections:
            return connections[0]
    return layer_name
# remove_overrides_json_string is a json string consisting of a node as a key, with a list of attributes we want to unlock as the value
# ie. remove_overrides_json_string = '{ "defaultRenderGlobals": [ "animation", "startFrame", "endFrame" ] }'
def unlockRenderSetupOverrides( remove_overrides_json_string ):
    """Disable the listed Render Setup overrides on every non-default layer.

    Workaround for a Redshift bug (fixed in REDSHIFT_RENDER_SETUP_FIX_VERSION)
    where render setup overrides were locked.  No-op when render setups are
    unavailable, unused, or the installed Redshift already has the fix.
    """
    try:
        # Ensure we're in a version that HAS render setups
        import maya.app.renderSetup.model.renderSetup as renderSetup
    except ImportError:
        return
    # Ensure that the scene is actively using render setups and not the legacy layers
    if not maya.mel.eval( 'exists mayaHasRenderSetup' ) or not maya.mel.eval( 'mayaHasRenderSetup();' ):
        return
    # If the version of Redshift has the bug fix, bypass the overrides
    if not redshiftRequiresWorkaround():
        return
    remove_overrides = json.loads( remove_overrides_json_string )
    render_setup = renderSetup.instance()
    layers = render_setup.getRenderLayers()
    layers_to_unlock = [ layer for layer in layers if layer.name() != 'defaultRenderLayer' ]
    for render_layer in layers_to_unlock:
        print('Disabling Render Setup Overrides in "%s"' % render_layer.name())
        for collection in render_layer.getCollections():
            # NOTE(review): exact-type match excludes subclasses — confirm
            # intent before switching to isinstance.
            if type(collection) == maya.app.renderSetup.model.collection.RenderSettingsCollection:
                for override in collection.getOverrides():
                    if override.targetNodeName() in remove_overrides and override.attributeName() in remove_overrides[ override.targetNodeName() ]:
                        print( ' Disabling Override: %s.%s' % ( override.targetNodeName(), override.attributeName() ) )
                        override.setSelfEnabled( False )
def redshiftRequiresWorkaround():
    """Whether the loaded redshift4maya plug-in predates the render setup
    override locking fix (and therefore needs the unlock workaround)."""
    version_string = maya.cmds.pluginInfo( 'redshift4maya', query=True, version=True )
    installed_version = tuple( map( int, version_string.split('.') ) )
    # Versions prior to the fix need the workaround.
    return installed_version < REDSHIFT_RENDER_SETUP_FIX_VERSION
def performArnoldPathmapping( startFrame, endFrame, tempLocation=None ):
    """
    Performs pathmapping on all arnold standin files that are need for the current task
    :param startFrame: Start frame of the task
    :param endFrame: End frame of the task
    :param tempLocation: The temporary location where all pathmapped files will be copied to. Only needs to be provided the first time this function is called.
    :return: Nothing

    Stateful: remembers original paths and already-mapped files in function
    attributes (tempLocation / originalProperties / mappedFiles) across calls.
    """
    if tempLocation:
        performArnoldPathmapping.tempLocation = tempLocation
    else:
        if not performArnoldPathmapping.tempLocation:
            raise ValueError( "The first call made to performArnoldPathmapping must provided a tempLocation" )
    #a simple regex for finding frame numbers
    frameRE = re.compile( r'#+' )
    # Define a function that will be used when looping to replace padding with a 0 padded string.
    def __replaceHashesWithZeroPaddedFrame( frameNum, origFileName ):
        return frameRE.sub( lambda matchObj: str( frameNum ).zfill( len(matchObj.group(0)) ), origFileName )
    standInObjects = maya.cmds.ls( type="aiStandIn" )
    for standIn in standInObjects:
        try:
            # If we have already seen this node before then grab the settings that we need
            origDir, origFileName = performArnoldPathmapping.originalProperties[ standIn ]
        except KeyError:
            # If we have not seen this node before then store it's original path and update the path in the node to where we will be pathmapping the file.
            standinFile = maya.cmds.getAttr( standIn + ".dso" )
            if not standinFile or os.path.splitext( standinFile )[ 1 ].lower() != ".ass":
                # If the standinFile isn't set or isn't .ass file then we cannot pathmap it.
                continue
            origDir, origFileName = os.path.split( standinFile )
            standinTempLocation = os.path.join( performArnoldPathmapping.tempLocation, standIn )
            maya.cmds.setAttr( "%s.dso" % standIn, os.path.join( standinTempLocation, origFileName ), type="string" )
            #Create the Temp directory the first time we see a new standin
            if not os.path.isdir( standinTempLocation ):
                os.makedirs( standinTempLocation )
            performArnoldPathmapping.originalProperties[ standIn ] = (origDir, origFileName)
        for frame in range( startFrame, endFrame + 1 ):
            # evaluate the frame that the node is using (Normally it will be the same as the scene but it can be different)
            evalFrame = maya.cmds.getAttr( "%s.frameNumber" % standIn, time=frame )
            fileNameWithFrame = __replaceHashesWithZeroPaddedFrame( evalFrame, origFileName )
            # If we have already mapped this file then continue.
            if not ( standIn, fileNameWithFrame ) in performArnoldPathmapping.mappedFiles:
                #Perform pathmapping
                runPathmappingOnFile(
                    os.path.join( origDir, fileNameWithFrame ),
                    os.path.join( performArnoldPathmapping.tempLocation, standIn, fileNameWithFrame )
                )
                performArnoldPathmapping.mappedFiles.add( ( standIn, fileNameWithFrame ) )
#State property which contains the temp root; set on the first call
performArnoldPathmapping.tempLocation = ""
#State property which contains mappings of standin objects to their original fileproperties
performArnoldPathmapping.originalProperties = {}
#State property which contains unique identifier for each file that we have already mapped in the form of ( standin, filename )
performArnoldPathmapping.mappedFiles=set()
def runPathmappingOnFile( originalLocation, pathmappedLocation ):
    """Copy *originalLocation* to *pathmappedLocation*, applying Deadline path
    mapping, by shelling out to deadlinecommand."""
    print( 'Running PathMapping on "%s" and copying to "%s"' % (originalLocation, pathmappedLocation) )
    args = [ "-CheckPathMappingInFile", originalLocation, pathmappedLocation ]
    command_output = CallDeadlineCommand( args )
    print( command_output )
def GetDeadlineCommand():
    """Return the full path to the 'deadlinecommand' executable.

    Resolution order:
      1. the DEADLINE_PATH environment variable;
      2. on OSX, the /Users/Shared/Thinkbox/DEADLINE_PATH file;
      3. otherwise the bare command name, so a PATH lookup can still work.
    """
    # os.environ.get replaces the old try/except KeyError dance.
    deadlineBin = os.environ.get( 'DEADLINE_PATH', '' )
    # On OSX, we look for the DEADLINE_PATH file if the environment variable does not exist.
    if deadlineBin == "" and os.path.exists( "/Users/Shared/Thinkbox/DEADLINE_PATH" ):
        with open( "/Users/Shared/Thinkbox/DEADLINE_PATH" ) as f:
            deadlineBin = f.read().strip()
    return os.path.join( deadlineBin, "deadlinecommand" )
def CallDeadlineCommand(arguments, hideWindow=True):
    """Run deadlinecommand with *arguments* and return its stdout.

    hideWindow only matters on Windows, where it suppresses the console
    window of the spawned process.  stderr is captured but discarded.
    """
    deadlineCommand = GetDeadlineCommand()
    startupinfo = None
    creationflags = 0
    if os.name == 'nt':
        if hideWindow:
            # Python 2.6 has subprocess.STARTF_USESHOWWINDOW, and Python 2.7 has subprocess._subprocess.STARTF_USESHOWWINDOW, so check for both.
            if hasattr( subprocess, '_subprocess' ) and hasattr( subprocess._subprocess, 'STARTF_USESHOWWINDOW' ):
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW
            elif hasattr( subprocess, 'STARTF_USESHOWWINDOW' ):
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        else:
            # still show top-level windows, but don't show a console window
            CREATE_NO_WINDOW = 0x08000000 #MSDN process creation flag
            creationflags = CREATE_NO_WINDOW
    # note: mutates the caller's list in place
    arguments.insert( 0, deadlineCommand )
    # Specifying PIPE for all handles to workaround a Python bug on Windows. The unused handles are then closed immediatley afterwards.
    proc = subprocess.Popen(arguments, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo, creationflags=creationflags)
    output, errors = proc.communicate()
    return output
def OutputPluginVersions():
    """Print the name and version of every loaded Maya plug-in, sorted
    case-insensitively."""
    print("================== PLUGINS ===================\n")
    loaded_plugins = maya.cmds.pluginInfo(query=True, listPlugins=True)
    for plugin in sorted(loaded_plugins, key=lambda name: name.lower()):
        plugin_version = maya.cmds.pluginInfo(plugin, query=True, version=True)
        print("%s (v%s)" % (plugin, plugin_version))
    print("==============================================\n")
def ForceLoadPlugins():
    """
    Force load an explicit set of plug-ins with known issues. There are bugs in Maya where these plug-ins are not
    automatically loaded when required in a scene.
    When a scene contains an Alembic reference node (backed by an external .abc file), Maya does not embed "requires"
    statements into the scene to indicate that the "AbcImport" and "fbxmaya" plug-ins are dependencies of the scene.
    This can be changed for the current Maya session with the following MEL commands:
    pluginInfo -edit -writeRequires AbcImport
    pluginInfo -edit -writeRequires fbxmaya
    However, there is a secondary bug where the "requires" statements are inserted in the scene after already trying to
    load the references.
    Our work-around is to force loading of these plug-ins always before loading the job scene. Both plugins ship with
    Maya and are fairly lightweight in size.
    """
    PLUGINS_TO_LOAD = (
        'AbcImport', # For Maya 2017 on Windows this is 5MB and takes 15 ms to load
        'fbxmaya'    # For Maya 2017 on Windows this is 12MB and takes 141ms to load
    )
    for plugin in PLUGINS_TO_LOAD:
        # Skip anything Maya already loaded.
        if maya.cmds.pluginInfo(plugin, query=True, loaded=True):
            continue
        try:
            print( "Loading %s..." % plugin, end="" )
            maya.cmds.loadPlugin( plugin )
        except RuntimeError as e:
            # Maya raises this exception when it cannot find the plugin. The message is formatted as:
            #
            #   Plug-in, "pluginName", was not found on MAYA_PLUG_IN_PATH
            #
            # This seems reasonable enough to forward on to the user. The try-except only serves the purpose of
            # continuing to attempt additional plug-ins. This is a best-effort work-around.
            print( 'Error: %s' % e)
        else:
            print( "ok" )

View file

@ -0,0 +1,217 @@
// Applies Deadline dirmap path mapping to a single file path.  When
// $checkExistence is set and the mapped path differs, the mapped pattern is
// resolved on disk (token-aware) and the ORIGINAL path is returned if no
// matching file exists.  Empty input is returned unchanged.
proc string deadlineMapIndividualFile( string $origFile, int $checkExistence )
{
if( $origFile == "" )
{
return $origFile;
}
string $mappedFile = `dirmap -cd $origFile`;
if( $mappedFile != $origFile && $checkExistence )
{
// resolver import may fail on old Maya versions; fall back to no check
if( catchQuiet(` python( "import maya.app.general.fileTexturePathResolver" )` ) )
{
print( "Unable to Import FileTexturePathResolver ignoring file existence check for pathmapping.\n" );
return $mappedFile;
}
string $resolvedFiles[] = `python( "maya.app.general.fileTexturePathResolver.findAllFilesForPattern('" + $mappedFile + "', None)" )`;
if( size( $resolvedFiles ) == 0 )
{
print( "Failed to Pathmap " + $origFile+ " no files exist that match the following mapped filename " + $mappedFile + "\n" );
$mappedFile = $origFile;
}
}
return $mappedFile;
}
//this function is used to remap all file paths of certain node type that include tokens. It is currently being used to fix an issue with the dirmap command.
global proc remapNodeFilePathsWithTokens( string $nodeType, string $pathAttr, int $checkExistence )
{
string $fileNodes[] = `ls -type $nodeType`;
for( $fileNode in $fileNodes )
{
string $fileAttr = $fileNode + "." + $pathAttr;
string $curFile = `getAttr $fileAttr`;
string $mappedFile = `deadlineMapIndividualFile $curFile $checkExistence`;
if( $mappedFile != $curFile )
{
print( "Changing "+$fileAttr+ " from " + $curFile + " to " + $mappedFile+"\n" );
setAttr -type "string" $fileAttr $mappedFile;
}
}
}
// Path-maps the OpenColorIO config file in the colour management prefs and,
// when the path actually changed, sets its enabled state to $enable.  No-op
// on Maya versions without the colorManagementPrefs command.
global proc mapOpenColorIOFile( int $enable )
{
string $origConfigPath = "";
// Ensure that the colorManagementPrefs command exists, last confirmed version is 2015 since docs aren't available before then
if( catchQuiet( $origConfigPath = `colorManagementPrefs -q -configFilePath` ) )
{
return;
}
string $mappedConfigPath = `deadlineMapIndividualFile $origConfigPath false`;
if( $mappedConfigPath != $origConfigPath )
{
print( "Changing OCIO Config File from " + $origConfigPath + " to " + $mappedConfigPath+"\n" );
colorManagementPrefs -e -configFilePath $mappedConfigPath;
colorManagementPrefs -e -cmConfigFileEnabled $enable;
}
}
proc string evaluateXgenVariable(string $variable)
{
// Computes the expanded XGen variable.
// This does not work for description or object/module level variables.
// xg.expandFilepath and its arguments are listed in the following header file:
//
// <MAYA_INSTALL_DIR>\plug-ins\xgen\include\XGen\XgExternalAPI.h
//
// NOTE: This is not documented in XGen's python API docs, but is exposed via the xgenm.XgExternalAPI module
// Assumes "import xgenm as xg" has already been executed in the Python
// interpreter (done by mapXGen before any call reaches here).
return python("xg.expandFilepath('" + $variable + "', '')");
}
proc string[] evaluateXGenVariables()
{
// Computes an array of all XGen variables that need to be expanded for path mapping XGen attributes
// This function evaluates the following XGen path variables:
// - ${HOME}
// - ${XGEN_ROOT}
// - ${XGEN_LOCATION}
// - ${PROJECT}
// The function returns a one-dimensional array (MEL only supports 1D arrays) containing the mapping of the
// variable to its evaluated value.  Entries are interleaved: each even index
// (0-based) holds an unexpanded variable and the following odd index holds
// that variable's expanded value.
// Example:
// > print(evaluateXGenVariables());
// {"${HOME}", "C:/Users/usiskin/Documents", "${XGEN_ROOT}", "C:/Assets/Maya/xgen", ... }
// Array of XGen variables to be expanded
string $xgenVariables[] = {
"${HOME}",
"${XGEN_ROOT}",
"${XGEN_LOCATION}",
"${PROJECT}"
};
// Resulting array that will contains interleaved xgen variables and their evaluated values
string $xgenValues[] = {};
// Temporary working variables
string $xgenVariable;
string $xgenValue;
int $i = 0;
// Evaluate all XGen variables
for( $xgenVariable in $xgenVariables )
{
$xgenValue = evaluateXgenVariable( $xgenVariable );
stringArrayInsertAtIndex($i++, $xgenValues, $xgenVariable);
stringArrayInsertAtIndex($i++, $xgenValues, $xgenValue);
}
return $xgenValues;
}
// Path-maps every attribute of the given XGen palette/description/object.
// $deadlineMappings and $xgenVarMap are interleaved find/replace pairs; XGen
// variables are expanded first so the path mappings can match real paths.
// Changed values are written back through xg.setAttr and logged.
proc mapXgenAttributes(string $palette, string $description, string $object, string $deadlineMappings[], string $xgenVarMap[] )
{
int $i = 0;
string $attributes[] = python("xg.attrs( \"" + $palette + "\", \"" + $description + "\", \"" + $object + "\" )");
for ($attribute in $attributes)
{
string $attrVal = python("xg.getAttr( \"" + $attribute + "\",\"" + $palette + "\", \"" + $description + "\", \"" + $object + "\" )");
// Replace slashes so that they are all forward slashes. Maya (and XGen) expect only forward slashes in paths,
// regardless of platform.
$attrVal = substituteAllString($attrVal, "\\", "/");
// Make a copy of the attribute value that we will use to expand XGen variables and run path mapping on.
string $newAttrVal = $attrVal;
// Substitue the XGen collection variables with their evaluated values (if we were able to evaluate them)
for( $i = 0; $i < size($xgenVarMap); $i += 2 )
{
string $xgenVariable = $xgenVarMap[$i];
string $xgenValue = $xgenVarMap[$i + 1];
if ( size( $xgenValue ) > 0 )
{
$newAttrVal = substituteAllString($newAttrVal, $xgenVariable, $xgenValue);
}
}
// Run the path mapping string substitutions
for( $i = 0; $i < size($deadlineMappings); $i += 2 )
{
$newAttrVal = substituteAllString($newAttrVal, $deadlineMappings[ $i ], $deadlineMappings[ $i + 1 ] );
}
if ( $newAttrVal != $attrVal )
{
// Escape quotes in the attribute values so that they can be used in a python statement
string $escapedNewAttrVal = substituteAllString($newAttrVal, "\"", "\\\"");
string $command = "xg.setAttr( \"" + $attribute + "\",\"" + $escapedNewAttrVal + "\",\"" + $palette + "\", \"" + $description + "\", \"" + $object + "\" )";
python($command);
// Build full attribute path (for logging only)
string $fullAttribute = $palette + ".";
if( size($description) )
{
$fullAttribute += $description + ".";
}
if( size($object) )
{
$fullAttribute += $object + ".";
}
$fullAttribute += $attribute;
print ( "Changing '" + $fullAttribute + "' from '" + $attrVal + "' To '" + $newAttrVal + "'\n" );
}
}
}
global proc mapXGen( string $deadlineMappings[] )
{
    // Entry point: runs Deadline path mapping over every XGen attribute in the
    // scene. Walks all palettes, their descriptions, and each description's
    // objects and FX modules, mapping attributes at every level.
    //
    // $deadlineMappings is an interleaved {from, to, from, to, ...} array of
    // path substitutions.

    // XGen may not be loaded; swallow the import error — with no module there
    // will be no palettes and nothing to do.
    catchQuiet ( python("import xgenm as xg") );

    string $palettes[];
    clear $palettes;
    catchQuiet ( $palettes = python("xg.palettes()") );
    if ( `size $palettes` == 0 )
        return;

    string $descriptions[];
    string $objects[];

    // Evaluate all XGen variables required for path mapping
    string $xgenVarMap[] = evaluateXGenVariables();

    for ( $palette in $palettes )
    {
        // Palette-level attributes first.
        mapXgenAttributes($palette, "", "", $deadlineMappings, $xgenVarMap);

        clear $descriptions;
        $descriptions = python("xg.descriptions( \"" + $palette + "\" )");
        for ( $description in $descriptions )
        {
            // Description-level attributes.
            mapXgenAttributes($palette, $description, "", $deadlineMappings, $xgenVarMap);

            // Objects plus FX modules are handled identically.
            clear $objects;
            $objects = python("xg.objects( \"" + $palette + "\", \"" + $description + "\" )");
            $objects = stringArrayCatenate( $objects, `python("xg.fxModules( \"" + $palette + "\", \"" + $description + "\" )")` );
            for ( $object in $objects )
            {
                mapXgenAttributes($palette, $description, $object, $deadlineMappings, $xgenVarMap);
            }
        }
    }
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 75 KiB

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,154 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Pype Maya Batch Wrapper Plugin for Deadline
Description=Not configurable
[ConcurrentTasks]
Type=label
Label=ConcurrentTasks
Category=About Plugin
CategoryOrder=-1
Index=0
Default=True
Description=Not configurable
[PypeWrapper]
Type=multilinemultifilename
Category=Pype Wrapper Script
CategoryOrder=0
Index=15
Label=Pype Wrapper Script locations
Default=P:\pype\launchers\pype-wrapper\MayaPype.bat;/mnt/pipeline/pype/pype-wrapper/MayaPype.sh;/Volumes/pipeline/pype/pype-wrapper/MayaPype.zsh
Description=The path to the Pype Wrapper script. Enter alternative paths on separate lines.
[MaxwellInteractiveSlaves]
Type=slavelist
Category=Maxwell For Maya (version 2 and later)
CategoryOrder=1
Index=0
Label=Workers To Use Interactive License
Default=
Description=A list of Workers that should use an interactive Maxwell license instead of a render license. Use a comma to separate multiple Worker names, for example: worker001,worker002,worker003
[EnablePathMapping]
Type=boolean
Category=Path Mapping For Scene Files (For Mixed Farms)
CategoryOrder=2
CategoryIndex=0
Label=Enable Path Mapping
Default=true
Description=If enabled, path mapping will be performed on the contents of the Maya scene file.
[PathMappingMode]
Type=enum
Values=Use dirmap Command;Text Replacement (.ma files only)
Category=Path Mapping For Scene Files (For Mixed Farms)
CategoryOrder=2
CategoryIndex=1
Label=Path Mapping Mode
Default=Use dirmap Command
Description=The first option uses Maya's 'dirmap' command to map paths when the scene is loaded, which works on .ma and .mb files. The second option creates a local copy of the .ma file, and uses text replacement on the file to map paths.
[XGenPathMapping]
Type=boolean
Category=Path Mapping For XGen Files
CategoryOrder=2
CategoryIndex=2
Label=Enable XGen Path Mapping
Default=true
Description=If enabled, path mapping will be performed on the contents of XGen files.
[SuppressWarnings]
Type=boolean
Category=Logging
CategoryOrder=3
CategoryIndex=0
Label=Suppress Warning Messages
Default=false
Description=If enabled, warning messages printed out by Maya will not be included in the render log.
[SilenceSceneLoadErrors]
Type=boolean
Category=Logging
CategoryOrder=3
CategoryIndex=1
Label=Silence Scene Load Errors
Default=false
Description=If enabled, errors that occur when loading a scene file are silenced by Maya and will not be included in the render log.
[WriteScriptToLog]
Type=boolean
Category=Logging
CategoryOrder=3
CategoryIndex=2
Label=Log Script Contents to Render Log
Default=false
Description=If enabled, or if an error occurs, the full script that Deadline is passing to Maya will be written to the render log. This functionality is useful for debugging purposes, and providing additional information to support.
[LimitThreadsToCPUAffinity]
Type=boolean
Category=CPU Affinity
CategoryOrder=4
CategoryIndex=0
Label=Limit Threads to CPU Affinity
Default=false
[AbortOnArnoldLicenseFail]
Type=boolean
Category=Arnold Options
CategoryOrder=5
CategoryIndex=0
Label=Abort On Arnold License Fail
Default=true
Description=If enabled, the render will fail if Arnold cannot get a license. If disabled, Arnold will render with a watermark if it cannot get a license (Only applies when Arnold is the Renderer).
[RemoteAssetPaths]
Type=multilinestring
Category=Local Asset Caching
CategoryOrder=6
CategoryIndex=0
Label=Remote Asset Path
Default=//;X:;Y:;Z:
Description=Assets whose paths begin with these paths will be copied to the Worker's local asset cache directory and be remapped in the scene file.
[SlaveLACDirectoryWindows]
Type=folder
Category=Local Asset Caching
CategoryOrder=6
CategoryIndex=1
Label=Worker LAC Directory (Windows)
Default=
Description=Windows Worker's local storage location for cached assets (accepts environment variables). If blank, defaults to Deadline's temp folder on the Worker. ie. %temp%/Thinkbox/DeadlineX/temp/MayaCache
[SlaveLACDirectoryOSX]
Type=folder
Category=Local Asset Caching
CategoryOrder=6
CategoryIndex=2
Label=Worker LAC Directory (OSX)
Default=
Description=OSX Worker's local storage location for cached assets (accepts environment variables). If blank, defaults to Deadline's temp folder on the Worker. ie. /Users/home/[user]/.local/share/Thinkbox/DeadlineX/temp/MayaCache
[SlaveLACDirectoryLinux]
Type=folder
Category=Local Asset Caching
CategoryOrder=6
CategoryIndex=3
Label=Worker LAC Directory (Linux)
Default=
Description=Linux Worker's local storage location for cached assets (accepts environment variables). If blank, defaults to Deadline's temp folder on the Worker. ie. /home/[user]/.local/share/Thinkbox/DeadlineX/temp/MayaCache
[SlaveLACDaysToDelete]
Type=integer
Category=Local Asset Caching
CategoryOrder=6
CategoryIndex=4
Label=Days until Cache Delete
Minimum=0
Maximum=99
Default=5
Description=Cache files will be deleted once this many days has been reached in between accesses.

File diff suppressed because it is too large Load diff

Binary file not shown.

After

Width:  |  Height:  |  Size: 108 KiB

View file

@ -0,0 +1,35 @@
[OIIOToolPath]
Type=filename
Label=OIIO Tool location
Category=OIIO
Index=0
Description=OIIO Tool executable to use.
Required=false
DisableIfBlank=true
[OutputFile]
Type=filenamesave
Label=Output File
Category=Output
Index=0
Description=The scene filename as it exists on the network
Required=false
DisableIfBlank=true
[CleanupTiles]
Type=boolean
Category=Options
Index=0
Label=Cleanup Tiles
Required=false
DisableIfBlank=true
Description=If enabled, the Pype Tile Assembler will cleanup all tiles after assembly.
[Renderer]
Type=string
Label=Renderer
Category=Quicktime Info
Index=0
Description=Renderer name
Required=false
DisableIfBlank=true

View file

@ -0,0 +1,17 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Pype Tile Assembler Plugin for Deadline
Description=Not configurable
[OIIOTool_RenderExecutable]
Type=multilinemultifilename
Label=OIIO Tool Executable
Category=Render Executables
CategoryOrder=0
Default=C:\Program Files\OIIO\bin\oiiotool.exe;/usr/bin/oiiotool
Description=The path to the Open Image IO Tool executable file used for rendering. Enter alternative paths on separate lines.
W

View file

@ -0,0 +1,372 @@
# -*- coding: utf-8 -*-
"""Tile Assembler Plugin using Open Image IO tool.
Todo:
Currently we support only EXRs with their data window set.
"""
import os
import subprocess
from xml.dom import minidom
from System.IO import Path
from Deadline.Plugins import DeadlinePlugin
from Deadline.Scripting import (
FileUtils, RepositoryUtils, SystemUtils)
# Element names in oiiotool's XML "--info:format=xml" output whose text
# content should be parsed as an integer (see PypeTileAssembler.info_about_input).
INT_KEYS = {
    "x", "y", "height", "width", "full_x", "full_y",
    "full_width", "full_height", "full_depth", "full_z",
    "tile_width", "tile_height", "tile_depth", "deep", "depth",
    "nchannels", "z_channel", "alpha_channel", "subimages"
}

# Element names whose XML node holds a list of child values; collected as
# a list of strings.
LIST_KEYS = {
    "channelnames"
}
def GetDeadlinePlugin():  # noqa: N802
    """Deadline entry point: return a fresh plugin instance."""
    return PypeTileAssembler()
def CleanupDeadlinePlugin(deadlinePlugin):  # noqa: N802, N803
    """Deadline entry point: release the plugin's callback references."""
    deadlinePlugin.cleanup()
class PypeTileAssembler(DeadlinePlugin):
    """Deadline plugin for assembling rendered tiles using OIIO.

    Reads a Draft-Tile-Assembler-compatible config file (the job's data
    file), builds an ``oiiotool`` command line that pastes every tile onto
    a canvas of the final resolution, and writes the assembled image to
    the configured output path. Optionally deletes the tiles afterwards.
    """

    def __init__(self):
        """Wire Deadline callbacks to this plugin's implementation."""
        self.InitializeProcessCallback += self.initialize_process
        self.RenderExecutableCallback += self.render_executable
        self.RenderArgumentCallback += self.render_argument
        self.PreRenderTasksCallback += self.pre_render_tasks
        self.PostRenderTasksCallback += self.post_render_tasks

    def cleanup(self):
        """Release callback references so the plugin can be collected."""
        for stdout_handler in self.StdoutHandlers:
            del stdout_handler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback
        del self.PreRenderTasksCallback
        del self.PostRenderTasksCallback

    def initialize_process(self):
        """Set process flags and register stdout error handling."""
        self.SingleFramesOnly = True
        self.StdoutHandling = True
        self.renderer = self.GetPluginInfoEntryWithDefault(
            "Renderer", "undefined")
        # Fail the task as soon as oiiotool prints an error line.
        self.AddStdoutHandlerCallback(
            ".*Error.*").HandleCallback += self.handle_stdout_error

    def render_executable(self):
        """Get render executable name.

        Get paths from plugin configuration, find executable and return it.

        Returns:
            (str): Render executable.

        """
        oiiotool_exe_list = self.GetConfigEntry("OIIOTool_RenderExecutable")
        oiiotool_exe = FileUtils.SearchFileList(oiiotool_exe_list)
        if oiiotool_exe == "":
            self.FailRender(("No file found in the semicolon separated "
                             "list \"{}\". The path to the render executable "
                             "can be configured from the Plugin Configuration "
                             "in the Deadline Monitor.").format(
                                oiiotool_exe_list))
        return oiiotool_exe

    def render_argument(self):
        """Generate command line arguments for render executable.

        Parses the (already path-mapped) tile config file prepared in
        :meth:`pre_render_tasks` and turns it into oiiotool arguments.
        Side effects: stores the parsed tile list on ``self.tiles`` so
        :meth:`post_render_tasks` can clean the tiles up.

        Returns:
            (str): arguments to add to render executable.

        """
        # Read tile config file. This file is in compatible format with
        # Draft Tile Assembler.
        data = {}
        # NOTE: "r" (not the removed-in-3.11 "rU" flag) — universal
        # newlines are the default in Python 3 text mode.
        with open(self.config_file, "r") as f:
            for text in f:
                # Parsing key-value pair and removing white-space
                # around the entries
                info = [x.strip() for x in text.split("=", 1)]
                if len(info) > 1:
                    try:
                        data[str(info[0])] = info[1]
                    except Exception as e:
                        # should never be called
                        self.FailRender(
                            "Cannot parse config file: {}".format(e))

        # Get output file. We support only EXRs now.
        output_file = data["ImageFileName"]
        output_file = RepositoryUtils.CheckPathMapping(output_file)
        output_file = self.process_path(output_file)
        # Intentionally disabled strict EXR-only check:
        """
        _, ext = os.path.splitext(output_file)
        if "exr" not in ext:
            self.FailRender(
                "[{}] Only EXR format is supported for now.".format(ext))
        """
        # Collect per-tile file path and placement info from the config.
        tile_info = []
        for tile in range(int(data["TileCount"])):
            tile_info.append({
                "filepath": data["Tile{}".format(tile)],
                "pos_x": int(data["Tile{}X".format(tile)]),
                "pos_y": int(data["Tile{}Y".format(tile)]),
                "height": int(data["Tile{}Height".format(tile)]),
                "width": int(data["Tile{}Width".format(tile)])
            })

        # FFMpeg doesn't support tile coordinates at the moment.
        # arguments = self.tile_completer_ffmpeg_args(
        #     int(data["ImageWidth"]), int(data["ImageHeight"]),
        #     tile_info, output_file)

        arguments = self.tile_oiio_args(
            int(data["ImageWidth"]), int(data["ImageHeight"]),
            tile_info, output_file)
        self.LogInfo(
            "Using arguments: {}".format(" ".join(arguments)))
        self.tiles = tile_info
        return " ".join(arguments)

    def process_path(self, filepath):
        """Handle slashes in file paths.

        Normalizes separators to the current platform; on Windows a path
        with a single leading backslash is promoted to a UNC-style
        double-backslash path.
        """
        if SystemUtils.IsRunningOnWindows():
            filepath = filepath.replace("/", "\\")
            if filepath.startswith("\\") and not filepath.startswith("\\\\"):
                filepath = "\\" + filepath
        else:
            filepath = filepath.replace("\\", "/")
        return filepath

    def pre_render_tasks(self):
        """Load config file and do remapping.

        Copies the job's data file into a per-thread temp directory while
        running Deadline path mapping with platform-appropriate separators,
        and stores the result path on ``self.config_file``.
        """
        self.LogInfo("Pype Tile Assembler starting...")
        scene_filename = self.GetDataFilename()

        temp_scene_directory = self.CreateTempDirectory(
            "thread" + str(self.GetThreadNumber()))
        temp_scene_filename = Path.GetFileName(scene_filename)
        self.config_file = Path.Combine(
            temp_scene_directory, temp_scene_filename)

        if SystemUtils.IsRunningOnWindows():
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                scene_filename, self.config_file, "/", "\\")
        else:
            RepositoryUtils.CheckPathMappingInFileAndReplaceSeparator(
                scene_filename, self.config_file, "\\", "/")
            # NOTE(review): chmod with the file's own current mode looks like
            # a no-op — TODO confirm intended permissions here.
            os.chmod(self.config_file, os.stat(self.config_file).st_mode)

    def post_render_tasks(self):
        """Cleanup tiles if required."""
        if self.GetBooleanPluginInfoEntryWithDefault("CleanupTiles", False):
            self.LogInfo("Cleaning up Tiles...")
            for tile in self.tiles:
                try:
                    self.LogInfo("Deleting: {}".format(tile["filepath"]))
                    os.remove(tile["filepath"])
                    # By this time we would have errored out
                    # if error on missing was enabled
                except KeyError:
                    pass
                except OSError:
                    # Best-effort cleanup: log and continue with other tiles.
                    self.LogInfo("Failed to delete: {}".format(
                        tile["filepath"]))
        self.LogInfo("Pype Tile Assembler Job finished.")

    def handle_stdout_error(self):
        """Fail the render with the matched stdout error line."""
        self.FailRender(self.GetRegexMatch(0))

    def tile_oiio_args(
            self, output_width, output_height, tile_info, output_path):
        """Generate oiio tool arguments for tile assembly.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tile_info (list): List of tile items, each item must be
                dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing path to file and x, y coordinates on output
                image where top-left point of tile item should start.
            output_path (str): Path to file where should be output stored.

        Returns:
            (list): oiio tools arguments.

        """
        args = []

        # Create new image with output resolution, and with same type and
        # channels as input
        first_tile_path = tile_info[0]["filepath"]
        first_tile_info = self.info_about_input(first_tile_path)
        create_arg_template = "--create{} {}x{} {}"

        image_type = ""
        image_format = first_tile_info.get("format")
        if image_format:
            image_type = ":type={}".format(image_format)

        create_arg = create_arg_template.format(
            image_type, output_width,
            output_height, first_tile_info["nchannels"]
        )
        args.append(create_arg)

        for tile in tile_info:
            path = tile["filepath"]
            pos_x = tile["pos_x"]
            tile_height = self.info_about_input(path)["height"]
            if self.renderer == "vray":
                pos_y = tile["pos_y"]
            else:
                # Non-V-Ray tiles report Y from the opposite edge; flip it.
                pos_y = output_height - tile["pos_y"] - tile_height
            # Add input path and make sure inputs origin is 0, 0
            args.append(path)
            args.append("--origin +0+0")
            # Swap to have input as foreground
            args.append("--swap")
            # Paste foreground to background
            args.append("--paste +{}+{}".format(pos_x, pos_y))

        args.append("-o")
        args.append(output_path)

        return args

    def tile_completer_ffmpeg_args(
            self, output_width, output_height, tiles_info, output_path):
        """Generate ffmpeg arguments for tile assembly.

        Expected inputs are tiled images.

        Args:
            output_width (int): Width of output image.
            output_height (int): Height of output image.
            tiles_info (list): List of tile items, each item must be
                dictionary with `filepath`, `pos_x` and `pos_y` keys
                representing path to file and x, y coordinates on output
                image where top-left point of tile item should start.
            output_path (str): Path to file where should be output stored.

        Returns:
            (list): ffmpeg arguments.

        """
        previous_name = "base"
        ffmpeg_args = []
        filter_complex_strs = []

        # Start from a blank canvas of the output resolution.
        filter_complex_strs.append("nullsrc=size={}x{}[{}]".format(
            output_width, output_height, previous_name
        ))

        new_tiles_info = {}
        for idx, tile_info in enumerate(tiles_info):
            # Add input and store input index
            filepath = tile_info["filepath"]
            ffmpeg_args.append("-i \"{}\"".format(filepath.replace("\\", "/")))

            # Prepare initial filter complex arguments
            index_name = "input{}".format(idx)
            filter_complex_strs.append(
                "[{}]setpts=PTS-STARTPTS[{}]".format(idx, index_name)
            )
            tile_info["index"] = idx
            new_tiles_info[index_name] = tile_info

        # Set frames to 1
        ffmpeg_args.append("-frames 1")

        # Concatenation filter complex arguments: overlay each tile on the
        # result of the previous overlay; the last one has no output label.
        global_index = 1
        total_index = len(new_tiles_info)
        for index_name, tile_info in new_tiles_info.items():
            item_str = (
                "[{previous_name}][{index_name}]overlay={pos_x}:{pos_y}"
            ).format(
                previous_name=previous_name,
                index_name=index_name,
                pos_x=tile_info["pos_x"],
                pos_y=tile_info["pos_y"]
            )
            new_previous = "tmp{}".format(global_index)
            if global_index != total_index:
                item_str += "[{}]".format(new_previous)
            filter_complex_strs.append(item_str)
            previous_name = new_previous
            global_index += 1

        joined_parts = ";".join(filter_complex_strs)
        filter_complex_str = "-filter_complex \"{}\"".format(joined_parts)

        ffmpeg_args.append(filter_complex_str)
        ffmpeg_args.append("-y")
        ffmpeg_args.append("\"{}\"".format(output_path))
        return ffmpeg_args

    def info_about_input(self, input_path):
        """Query oiiotool for image metadata of ``input_path``.

        Runs ``oiiotool --info:format=xml`` and parses the ImageSpec XML.
        Values whose tag is in INT_KEYS are converted to int, LIST_KEYS
        tags become lists of strings, everything else stays a string.

        Returns:
            (dict): Parsed ImageSpec data; empty if no ImageSpec found.

        """
        # NOTE(review): shell=True with string-joined args will break on
        # paths containing spaces — confirm tile paths never contain them.
        args = [self.render_executable(), "--info:format=xml", input_path]
        popen = subprocess.Popen(
            " ".join(args),
            shell=True,
            stdout=subprocess.PIPE
        )
        # Strip Windows line endings so minidom sees one continuous document.
        popen_output = popen.communicate()[0].replace(b"\r\n", b"")

        xmldoc = minidom.parseString(popen_output)

        image_spec = None
        for main_child in xmldoc.childNodes:
            if main_child.nodeName.lower() == "imagespec":
                image_spec = main_child
                break

        info = {}
        if not image_spec:
            return info

        def child_check(node):
            # Guard: every leaf tag is expected to wrap exactly one text node.
            if len(node.childNodes) != 1:
                self.FailRender((
                    "Implementation BUG. Node {} has more children than 1"
                ).format(node.nodeName))

        for child in image_spec.childNodes:
            if child.nodeName in LIST_KEYS:
                values = []
                for node in child.childNodes:
                    child_check(node)
                    values.append(node.childNodes[0].nodeValue)
                info[child.nodeName] = values

            elif child.nodeName in INT_KEYS:
                child_check(child)
                info[child.nodeName] = int(child.childNodes[0].nodeValue)

            else:
                child_check(child)
                info[child.nodeName] = child.childNodes[0].nodeValue
        return info

3
vendor/deadline/readme.md vendored Normal file
View file

@ -0,0 +1,3 @@
## Pype Deadline repository overlay
This directory is an overlay for the Deadline repository. You can copy the whole hierarchy into your Deadline repository and it should work.