").append( jQuery.parseHTML( responseText ) ).find( selector ) :
+
+ // Otherwise use the full result
+ responseText );
+
+ }).complete( callback && function( jqXHR, status ) {
+ self.each( callback, response || [ jqXHR.responseText, status, jqXHR ] );
+ });
+ }
+
+ return this;
+};
+
+// Attach a bunch of functions for handling common AJAX events
+jQuery.each( [ "ajaxStart", "ajaxStop", "ajaxComplete", "ajaxError", "ajaxSuccess", "ajaxSend" ], function( i, type ){
+ jQuery.fn[ type ] = function( fn ){
+ return this.on( type, fn );
+ };
+});
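+
+// Usage sketch (handler body is illustrative): these helpers simply delegate to
+// .on(), so a global AJAX error log can be attached, e.g. on document:
+//   jQuery( document ).ajaxError(function( event, jqXHR, settings, error ) {
+//     jQuery( "#log" ).append( "Request to " + settings.url + " failed" );
+//   });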
+
+jQuery.each( [ "get", "post" ], function( i, method ) {
+ jQuery[ method ] = function( url, data, callback, type ) {
+ // shift arguments if data argument was omitted
+ if ( jQuery.isFunction( data ) ) {
+ type = type || callback;
+ callback = data;
+ data = undefined;
+ }
+
+ return jQuery.ajax({
+ url: url,
+ type: method,
+ dataType: type,
+ data: data,
+ success: callback
+ });
+ };
+});
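+
+// Usage sketch (URLs and callbacks are illustrative): both shorthands accept an
+// optional data object and dataType, and the callback may take data's place, e.g.
+//   jQuery.get( "ajax/test.html", function( data ) {
+//     jQuery( "#result" ).html( data );
+//   });
+//   jQuery.post( "ajax/save", { id: 42 }, function( resp ) {}, "json" );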
+
+jQuery.extend({
+
+ // Counter for holding the number of active queries
+ active: 0,
+
+ // Last-Modified header cache for next request
+ lastModified: {},
+ etag: {},
+
+ ajaxSettings: {
+ url: ajaxLocation,
+ type: "GET",
+ isLocal: rlocalProtocol.test( ajaxLocParts[ 1 ] ),
+ global: true,
+ processData: true,
+ async: true,
+ contentType: "application/x-www-form-urlencoded; charset=UTF-8",
+ /*
+ timeout: 0,
+ data: null,
+ dataType: null,
+ username: null,
+ password: null,
+ cache: null,
+ throws: false,
+ traditional: false,
+ headers: {},
+ */
+
+ accepts: {
+ "*": allTypes,
+ text: "text/plain",
+ html: "text/html",
+ xml: "application/xml, text/xml",
+ json: "application/json, text/javascript"
+ },
+
+ contents: {
+ xml: /xml/,
+ html: /html/,
+ json: /json/
+ },
+
+ responseFields: {
+ xml: "responseXML",
+ text: "responseText"
+ },
+
+ // Data converters
+ // Keys separate source (or catchall "*") and destination types with a single space
+ converters: {
+
+ // Convert anything to text
+ "* text": window.String,
+
+ // Text to html (true = no transformation)
+ "text html": true,
+
+ // Evaluate text as a json expression
+ "text json": jQuery.parseJSON,
+
+ // Parse text as xml
+ "text xml": jQuery.parseXML
+ },
+
+ // For options that shouldn't be deep extended:
+ // you can add your own custom options here if
+ // and when you create one that shouldn't be
+ // deep extended (see ajaxExtend)
+ flatOptions: {
+ url: true,
+ context: true
+ }
+ },
+
+ // Creates a full-fledged settings object into target
+ // with both ajaxSettings and settings fields.
+ // If target is omitted, writes into ajaxSettings.
+ ajaxSetup: function( target, settings ) {
+ return settings ?
+
+ // Building a settings object
+ ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) :
+
+ // Extending ajaxSettings
+ ajaxExtend( jQuery.ajaxSettings, target );
+ },
+
+ ajaxPrefilter: addToPrefiltersOrTransports( prefilters ),
+ ajaxTransport: addToPrefiltersOrTransports( transports ),
+
+ // Main method
+ ajax: function( url, options ) {
+
+ // If url is an object, simulate pre-1.5 signature
+ if ( typeof url === "object" ) {
+ options = url;
+ url = undefined;
+ }
+
+ // Force options to be an object
+ options = options || {};
+
+ var // Cross-domain detection vars
+ parts,
+ // Loop variable
+ i,
+ // URL without anti-cache param
+ cacheURL,
+ // Response headers as string
+ responseHeadersString,
+ // timeout handle
+ timeoutTimer,
+
+ // To know if global events are to be dispatched
+ fireGlobals,
+
+ transport,
+ // Response headers
+ responseHeaders,
+ // Create the final options object
+ s = jQuery.ajaxSetup( {}, options ),
+ // Callbacks context
+ callbackContext = s.context || s,
+ // Context for global events is callbackContext if it is a DOM node or jQuery collection
+ globalEventContext = s.context && ( callbackContext.nodeType || callbackContext.jquery ) ?
+ jQuery( callbackContext ) :
+ jQuery.event,
+ // Deferreds
+ deferred = jQuery.Deferred(),
+ completeDeferred = jQuery.Callbacks("once memory"),
+ // Status-dependent callbacks
+ statusCode = s.statusCode || {},
+ // Headers (they are sent all at once)
+ requestHeaders = {},
+ requestHeadersNames = {},
+ // The jqXHR state
+ state = 0,
+ // Default abort message
+ strAbort = "canceled",
+ // Fake xhr
+ jqXHR = {
+ readyState: 0,
+
+ // Builds headers hashtable if needed
+ getResponseHeader: function( key ) {
+ var match;
+ if ( state === 2 ) {
+ if ( !responseHeaders ) {
+ responseHeaders = {};
+ while ( (match = rheaders.exec( responseHeadersString )) ) {
+ responseHeaders[ match[1].toLowerCase() ] = match[ 2 ];
+ }
+ }
+ match = responseHeaders[ key.toLowerCase() ];
+ }
+ return match == null ? null : match;
+ },
+
+ // Raw string
+ getAllResponseHeaders: function() {
+ return state === 2 ? responseHeadersString : null;
+ },
+
+ // Caches the header
+ setRequestHeader: function( name, value ) {
+ var lname = name.toLowerCase();
+ if ( !state ) {
+ name = requestHeadersNames[ lname ] = requestHeadersNames[ lname ] || name;
+ requestHeaders[ name ] = value;
+ }
+ return this;
+ },
+
+ // Overrides response content-type header
+ overrideMimeType: function( type ) {
+ if ( !state ) {
+ s.mimeType = type;
+ }
+ return this;
+ },
+
+ // Status-dependent callbacks
+ statusCode: function( map ) {
+ var code;
+ if ( map ) {
+ if ( state < 2 ) {
+ for ( code in map ) {
+ // Lazy-add the new callback in a way that preserves old ones
+ statusCode[ code ] = [ statusCode[ code ], map[ code ] ];
+ }
+ } else {
+ // Execute the appropriate callbacks
+ jqXHR.always( map[ jqXHR.status ] );
+ }
+ }
+ return this;
+ },
+
+ // Cancel the request
+ abort: function( statusText ) {
+ var finalText = statusText || strAbort;
+ if ( transport ) {
+ transport.abort( finalText );
+ }
+ done( 0, finalText );
+ return this;
+ }
+ };
+
+ // Attach deferreds
+ deferred.promise( jqXHR ).complete = completeDeferred.add;
+ jqXHR.success = jqXHR.done;
+ jqXHR.error = jqXHR.fail;
+
+ // Remove hash character (#7531: and string promotion)
+ // Add protocol if not provided (#5866: IE7 issue with protocol-less urls)
+ // Handle falsy url in the settings object (#10093: consistency with old signature)
+ // We also use the url parameter if available
+ s.url = ( ( url || s.url || ajaxLocation ) + "" ).replace( rhash, "" ).replace( rprotocol, ajaxLocParts[ 1 ] + "//" );
+
+ // Alias method option to type as per ticket #12004
+ s.type = options.method || options.type || s.method || s.type;
+
+ // Extract dataTypes list
+ s.dataTypes = jQuery.trim( s.dataType || "*" ).toLowerCase().match( core_rnotwhite ) || [""];
+
+ // A cross-domain request is in order when we have a protocol:host:port mismatch
+ if ( s.crossDomain == null ) {
+ parts = rurl.exec( s.url.toLowerCase() );
+ s.crossDomain = !!( parts &&
+ ( parts[ 1 ] !== ajaxLocParts[ 1 ] || parts[ 2 ] !== ajaxLocParts[ 2 ] ||
+ ( parts[ 3 ] || ( parts[ 1 ] === "http:" ? 80 : 443 ) ) !=
+ ( ajaxLocParts[ 3 ] || ( ajaxLocParts[ 1 ] === "http:" ? 80 : 443 ) ) )
+ );
+ }
+
+ // Convert data if not already a string
+ if ( s.data && s.processData && typeof s.data !== "string" ) {
+ s.data = jQuery.param( s.data, s.traditional );
+ }
+
+ // Apply prefilters
+ inspectPrefiltersOrTransports( prefilters, s, options, jqXHR );
+
+ // If request was aborted inside a prefilter, stop there
+ if ( state === 2 ) {
+ return jqXHR;
+ }
+
+ // We can fire global events as of now if asked to
+ fireGlobals = s.global;
+
+ // Watch for a new set of requests
+ if ( fireGlobals && jQuery.active++ === 0 ) {
+ jQuery.event.trigger("ajaxStart");
+ }
+
+ // Uppercase the type
+ s.type = s.type.toUpperCase();
+
+ // Determine if request has content
+ s.hasContent = !rnoContent.test( s.type );
+
+ // Save the URL in case we're toying with the If-Modified-Since
+ // and/or If-None-Match header later on
+ cacheURL = s.url;
+
+ // More options handling for requests with no content
+ if ( !s.hasContent ) {
+
+ // If data is available, append data to url
+ if ( s.data ) {
+ cacheURL = ( s.url += ( ajax_rquery.test( cacheURL ) ? "&" : "?" ) + s.data );
+ // #9682: remove data so that it's not used in an eventual retry
+ delete s.data;
+ }
+
+ // Add anti-cache in url if needed
+ if ( s.cache === false ) {
+ s.url = rts.test( cacheURL ) ?
+
+ // If there is already a '_' parameter, set its value
+ cacheURL.replace( rts, "$1_=" + ajax_nonce++ ) :
+
+ // Otherwise add one to the end
+ cacheURL + ( ajax_rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ajax_nonce++;
+ }
+ }
+
+ // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+ if ( s.ifModified ) {
+ if ( jQuery.lastModified[ cacheURL ] ) {
+ jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] );
+ }
+ if ( jQuery.etag[ cacheURL ] ) {
+ jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] );
+ }
+ }
+
+ // Set the correct header, if data is being sent
+ if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) {
+ jqXHR.setRequestHeader( "Content-Type", s.contentType );
+ }
+
+ // Set the Accepts header for the server, depending on the dataType
+ jqXHR.setRequestHeader(
+ "Accept",
+ s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[0] ] ?
+ s.accepts[ s.dataTypes[0] ] + ( s.dataTypes[ 0 ] !== "*" ? ", " + allTypes + "; q=0.01" : "" ) :
+ s.accepts[ "*" ]
+ );
+
+ // Check for headers option
+ for ( i in s.headers ) {
+ jqXHR.setRequestHeader( i, s.headers[ i ] );
+ }
+
+ // Allow custom headers/mimetypes and early abort
+ if ( s.beforeSend && ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || state === 2 ) ) {
+ // Abort if not done already and return
+ return jqXHR.abort();
+ }
+
+ // aborting is no longer a cancellation
+ strAbort = "abort";
+
+ // Install callbacks on deferreds
+ for ( i in { success: 1, error: 1, complete: 1 } ) {
+ jqXHR[ i ]( s[ i ] );
+ }
+
+ // Get transport
+ transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR );
+
+ // If no transport, we auto-abort
+ if ( !transport ) {
+ done( -1, "No Transport" );
+ } else {
+ jqXHR.readyState = 1;
+
+ // Send global event
+ if ( fireGlobals ) {
+ globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] );
+ }
+ // Timeout
+ if ( s.async && s.timeout > 0 ) {
+ timeoutTimer = setTimeout(function() {
+ jqXHR.abort("timeout");
+ }, s.timeout );
+ }
+
+ try {
+ state = 1;
+ transport.send( requestHeaders, done );
+ } catch ( e ) {
+ // Propagate exception as error if not done
+ if ( state < 2 ) {
+ done( -1, e );
+ // Simply rethrow otherwise
+ } else {
+ throw e;
+ }
+ }
+ }
+
+ // Callback for when everything is done
+ function done( status, nativeStatusText, responses, headers ) {
+ var isSuccess, success, error, response, modified,
+ statusText = nativeStatusText;
+
+ // Called once
+ if ( state === 2 ) {
+ return;
+ }
+
+ // State is "done" now
+ state = 2;
+
+ // Clear timeout if it exists
+ if ( timeoutTimer ) {
+ clearTimeout( timeoutTimer );
+ }
+
+ // Dereference transport for early garbage collection
+ // (no matter how long the jqXHR object will be used)
+ transport = undefined;
+
+ // Cache response headers
+ responseHeadersString = headers || "";
+
+ // Set readyState
+ jqXHR.readyState = status > 0 ? 4 : 0;
+
+ // Get response data
+ if ( responses ) {
+ response = ajaxHandleResponses( s, jqXHR, responses );
+ }
+
+ // If successful, handle type chaining
+ if ( status >= 200 && status < 300 || status === 304 ) {
+
+ // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+ if ( s.ifModified ) {
+ modified = jqXHR.getResponseHeader("Last-Modified");
+ if ( modified ) {
+ jQuery.lastModified[ cacheURL ] = modified;
+ }
+ modified = jqXHR.getResponseHeader("etag");
+ if ( modified ) {
+ jQuery.etag[ cacheURL ] = modified;
+ }
+ }
+
+ // if no content
+ if ( status === 204 ) {
+ isSuccess = true;
+ statusText = "nocontent";
+
+ // if not modified
+ } else if ( status === 304 ) {
+ isSuccess = true;
+ statusText = "notmodified";
+
+ // If we have data, let's convert it
+ } else {
+ isSuccess = ajaxConvert( s, response );
+ statusText = isSuccess.state;
+ success = isSuccess.data;
+ error = isSuccess.error;
+ isSuccess = !error;
+ }
+ } else {
+ // We extract error from statusText
+ // then normalize statusText and status for non-aborts
+ error = statusText;
+ if ( status || !statusText ) {
+ statusText = "error";
+ if ( status < 0 ) {
+ status = 0;
+ }
+ }
+ }
+
+ // Set data for the fake xhr object
+ jqXHR.status = status;
+ jqXHR.statusText = ( nativeStatusText || statusText ) + "";
+
+ // Success/Error
+ if ( isSuccess ) {
+ deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] );
+ } else {
+ deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] );
+ }
+
+ // Status-dependent callbacks
+ jqXHR.statusCode( statusCode );
+ statusCode = undefined;
+
+ if ( fireGlobals ) {
+ globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError",
+ [ jqXHR, s, isSuccess ? success : error ] );
+ }
+
+ // Complete
+ completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] );
+
+ if ( fireGlobals ) {
+ globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] );
+ // Handle the global AJAX counter
+ if ( !( --jQuery.active ) ) {
+ jQuery.event.trigger("ajaxStop");
+ }
+ }
+ }
+
+ return jqXHR;
+ },
+
+ getScript: function( url, callback ) {
+ return jQuery.get( url, undefined, callback, "script" );
+ },
+
+ getJSON: function( url, data, callback ) {
+ return jQuery.get( url, data, callback, "json" );
+ }
+});
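+
+// Usage sketch (URLs are illustrative): the full jQuery.ajax form and the
+// getJSON/getScript shorthands above build the same settings object, e.g.
+//   jQuery.ajax({ url: "ajax/user", type: "GET", dataType: "json" })
+//     .done(function( data ) {})
+//     .fail(function( jqXHR, status, error ) {});
+//   jQuery.getJSON( "ajax/user", { id: 42 }, function( data ) {});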
+
+/* Handles responses to an ajax request:
+ * - sets all responseXXX fields accordingly
+ * - finds the right dataType (mediates between content-type and expected dataType)
+ * - returns the corresponding response
+ */
+function ajaxHandleResponses( s, jqXHR, responses ) {
+ var firstDataType, ct, finalDataType, type,
+ contents = s.contents,
+ dataTypes = s.dataTypes,
+ responseFields = s.responseFields;
+
+ // Fill responseXXX fields
+ for ( type in responseFields ) {
+ if ( type in responses ) {
+ jqXHR[ responseFields[type] ] = responses[ type ];
+ }
+ }
+
+ // Remove auto dataType and get content-type in the process
+ while( dataTypes[ 0 ] === "*" ) {
+ dataTypes.shift();
+ if ( ct === undefined ) {
+ ct = s.mimeType || jqXHR.getResponseHeader("Content-Type");
+ }
+ }
+
+ // Check if we're dealing with a known content-type
+ if ( ct ) {
+ for ( type in contents ) {
+ if ( contents[ type ] && contents[ type ].test( ct ) ) {
+ dataTypes.unshift( type );
+ break;
+ }
+ }
+ }
+
+ // Check to see if we have a response for the expected dataType
+ if ( dataTypes[ 0 ] in responses ) {
+ finalDataType = dataTypes[ 0 ];
+ } else {
+ // Try convertible dataTypes
+ for ( type in responses ) {
+ if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[0] ] ) {
+ finalDataType = type;
+ break;
+ }
+ if ( !firstDataType ) {
+ firstDataType = type;
+ }
+ }
+ // Or just use first one
+ finalDataType = finalDataType || firstDataType;
+ }
+
+ // If we found a dataType
+ // We add the dataType to the list if needed
+ // and return the corresponding response
+ if ( finalDataType ) {
+ if ( finalDataType !== dataTypes[ 0 ] ) {
+ dataTypes.unshift( finalDataType );
+ }
+ return responses[ finalDataType ];
+ }
+}
+
+// Chain conversions given the request and the original response
+function ajaxConvert( s, response ) {
+ var conv2, current, conv, tmp,
+ converters = {},
+ i = 0,
+ // Work with a copy of dataTypes in case we need to modify it for conversion
+ dataTypes = s.dataTypes.slice(),
+ prev = dataTypes[ 0 ];
+
+ // Apply the dataFilter if provided
+ if ( s.dataFilter ) {
+ response = s.dataFilter( response, s.dataType );
+ }
+
+ // Create converters map with lowercased keys
+ if ( dataTypes[ 1 ] ) {
+ for ( conv in s.converters ) {
+ converters[ conv.toLowerCase() ] = s.converters[ conv ];
+ }
+ }
+
+ // Convert to each sequential dataType, tolerating list modification
+ for ( ; (current = dataTypes[++i]); ) {
+
+ // There's only work to do if current dataType is non-auto
+ if ( current !== "*" ) {
+
+ // Convert response if prev dataType is non-auto and differs from current
+ if ( prev !== "*" && prev !== current ) {
+
+ // Seek a direct converter
+ conv = converters[ prev + " " + current ] || converters[ "* " + current ];
+
+ // If none found, seek a pair
+ if ( !conv ) {
+ for ( conv2 in converters ) {
+
+ // If conv2 outputs current
+ tmp = conv2.split(" ");
+ if ( tmp[ 1 ] === current ) {
+
+ // If prev can be converted to accepted input
+ conv = converters[ prev + " " + tmp[ 0 ] ] ||
+ converters[ "* " + tmp[ 0 ] ];
+ if ( conv ) {
+ // Condense equivalence converters
+ if ( conv === true ) {
+ conv = converters[ conv2 ];
+
+ // Otherwise, insert the intermediate dataType
+ } else if ( converters[ conv2 ] !== true ) {
+ current = tmp[ 0 ];
+ dataTypes.splice( i--, 0, current );
+ }
+
+ break;
+ }
+ }
+ }
+ }
+
+ // Apply converter (if not an equivalence)
+ if ( conv !== true ) {
+
+ // Unless errors are allowed to bubble, catch and return them
+ if ( conv && s["throws"] ) {
+ response = conv( response );
+ } else {
+ try {
+ response = conv( response );
+ } catch ( e ) {
+ return { state: "parsererror", error: conv ? e : "No conversion from " + prev + " to " + current };
+ }
+ }
+ }
+ }
+
+ // Update prev for next iteration
+ prev = current;
+ }
+ }
+
+ return { state: "success", data: response };
+}
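+
+// Usage sketch (the "mycsv" dataType and its parser are hypothetical): custom
+// dataTypes plug into the converter chain above via ajaxSetup, e.g.
+//   jQuery.ajaxSetup({
+//     contents: { mycsv: /csv/ },
+//     converters: { "text mycsv": function( text ) { return text.split( "," ); } }
+//   });
+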
+// Install script dataType
+jQuery.ajaxSetup({
+ accepts: {
+ script: "text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"
+ },
+ contents: {
+ script: /(?:java|ecma)script/
+ },
+ converters: {
+ "text script": function( text ) {
+ jQuery.globalEval( text );
+ return text;
+ }
+ }
+});
+
+// Handle cache's special case and global
+jQuery.ajaxPrefilter( "script", function( s ) {
+ if ( s.cache === undefined ) {
+ s.cache = false;
+ }
+ if ( s.crossDomain ) {
+ s.type = "GET";
+ s.global = false;
+ }
+});
+
+// Bind script tag hack transport
+jQuery.ajaxTransport( "script", function(s) {
+
+ // This transport only deals with cross domain requests
+ if ( s.crossDomain ) {
+
+ var script,
+ head = document.head || jQuery("head")[0] || document.documentElement;
+
+ return {
+
+ send: function( _, callback ) {
+
+ script = document.createElement("script");
+
+ script.async = true;
+
+ if ( s.scriptCharset ) {
+ script.charset = s.scriptCharset;
+ }
+
+ script.src = s.url;
+
+ // Attach handlers for all browsers
+ script.onload = script.onreadystatechange = function( _, isAbort ) {
+
+ if ( isAbort || !script.readyState || /loaded|complete/.test( script.readyState ) ) {
+
+ // Handle memory leak in IE
+ script.onload = script.onreadystatechange = null;
+
+ // Remove the script
+ if ( script.parentNode ) {
+ script.parentNode.removeChild( script );
+ }
+
+ // Dereference the script
+ script = null;
+
+ // Callback if not abort
+ if ( !isAbort ) {
+ callback( 200, "success" );
+ }
+ }
+ };
+
+ // Circumvent IE6 bugs with base elements (#2709 and #4378) by prepending
+ // Use native DOM manipulation to avoid our domManip AJAX trickery
+ head.insertBefore( script, head.firstChild );
+ },
+
+ abort: function() {
+ if ( script ) {
+ script.onload( undefined, true );
+ }
+ }
+ };
+ }
+});
+var oldCallbacks = [],
+ rjsonp = /(=)\?(?=&|$)|\?\?/;
+
+// Default jsonp settings
+jQuery.ajaxSetup({
+ jsonp: "callback",
+ jsonpCallback: function() {
+ var callback = oldCallbacks.pop() || ( jQuery.expando + "_" + ( ajax_nonce++ ) );
+ this[ callback ] = true;
+ return callback;
+ }
+});
+
+// Detect, normalize options and install callbacks for jsonp requests
+jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) {
+
+ var callbackName, overwritten, responseContainer,
+ jsonProp = s.jsonp !== false && ( rjsonp.test( s.url ) ?
+ "url" :
+ typeof s.data === "string" && !( s.contentType || "" ).indexOf("application/x-www-form-urlencoded") && rjsonp.test( s.data ) && "data"
+ );
+
+ // Handle iff the expected data type is "jsonp" or we have a parameter to set
+ if ( jsonProp || s.dataTypes[ 0 ] === "jsonp" ) {
+
+ // Get callback name, remembering preexisting value associated with it
+ callbackName = s.jsonpCallback = jQuery.isFunction( s.jsonpCallback ) ?
+ s.jsonpCallback() :
+ s.jsonpCallback;
+
+ // Insert callback into url or form data
+ if ( jsonProp ) {
+ s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, "$1" + callbackName );
+ } else if ( s.jsonp !== false ) {
+ s.url += ( ajax_rquery.test( s.url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName;
+ }
+
+ // Use data converter to retrieve json after script execution
+ s.converters["script json"] = function() {
+ if ( !responseContainer ) {
+ jQuery.error( callbackName + " was not called" );
+ }
+ return responseContainer[ 0 ];
+ };
+
+ // force json dataType
+ s.dataTypes[ 0 ] = "json";
+
+ // Install callback
+ overwritten = window[ callbackName ];
+ window[ callbackName ] = function() {
+ responseContainer = arguments;
+ };
+
+ // Clean-up function (fires after converters)
+ jqXHR.always(function() {
+ // Restore preexisting value
+ window[ callbackName ] = overwritten;
+
+ // Save back as free
+ if ( s[ callbackName ] ) {
+ // make sure that re-using the options doesn't screw things around
+ s.jsonpCallback = originalSettings.jsonpCallback;
+
+ // save the callback name for future use
+ oldCallbacks.push( callbackName );
+ }
+
+ // Call if it was a function and we have a response
+ if ( responseContainer && jQuery.isFunction( overwritten ) ) {
+ overwritten( responseContainer[ 0 ] );
+ }
+
+ responseContainer = overwritten = undefined;
+ });
+
+ // Delegate to script
+ return "script";
+ }
+});
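+
+// Usage sketch (URL is illustrative): a request with dataType "jsonp" gets a
+// generated callback name appended as "callback=..." per the defaults above, e.g.
+//   jQuery.ajax({ url: "http://example.com/api", dataType: "jsonp" })
+//     .done(function( data ) {});
+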
+var xhrCallbacks, xhrSupported,
+ xhrId = 0,
+ // #5280: Internet Explorer will keep connections alive if we don't abort on unload
+ xhrOnUnloadAbort = window.ActiveXObject && function() {
+ // Abort all pending requests
+ var key;
+ for ( key in xhrCallbacks ) {
+ xhrCallbacks[ key ]( undefined, true );
+ }
+ };
+
+// Functions to create xhrs
+function createStandardXHR() {
+ try {
+ return new window.XMLHttpRequest();
+ } catch( e ) {}
+}
+
+function createActiveXHR() {
+ try {
+ return new window.ActiveXObject("Microsoft.XMLHTTP");
+ } catch( e ) {}
+}
+
+// Create the request object
+// (This is still attached to ajaxSettings for backward compatibility)
+jQuery.ajaxSettings.xhr = window.ActiveXObject ?
+ /* Microsoft failed to properly
+ * implement the XMLHttpRequest in IE7 (can't request local files),
+ * so we use the ActiveXObject when it is available
+ * Additionally XMLHttpRequest can be disabled in IE7/IE8 so
+ * we need a fallback.
+ */
+ function() {
+ return !this.isLocal && createStandardXHR() || createActiveXHR();
+ } :
+ // For all other browsers, use the standard XMLHttpRequest object
+ createStandardXHR;
+
+// Determine support properties
+xhrSupported = jQuery.ajaxSettings.xhr();
+jQuery.support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported );
+xhrSupported = jQuery.support.ajax = !!xhrSupported;
+
+// Create transport if the browser can provide an xhr
+if ( xhrSupported ) {
+
+ jQuery.ajaxTransport(function( s ) {
+ // Cross domain only allowed if supported through XMLHttpRequest
+ if ( !s.crossDomain || jQuery.support.cors ) {
+
+ var callback;
+
+ return {
+ send: function( headers, complete ) {
+
+ // Get a new xhr
+ var handle, i,
+ xhr = s.xhr();
+
+ // Open the socket
+ // Passing a null username generates a login popup on Opera (#2865)
+ if ( s.username ) {
+ xhr.open( s.type, s.url, s.async, s.username, s.password );
+ } else {
+ xhr.open( s.type, s.url, s.async );
+ }
+
+ // Apply custom fields if provided
+ if ( s.xhrFields ) {
+ for ( i in s.xhrFields ) {
+ xhr[ i ] = s.xhrFields[ i ];
+ }
+ }
+
+ // Override mime type if needed
+ if ( s.mimeType && xhr.overrideMimeType ) {
+ xhr.overrideMimeType( s.mimeType );
+ }
+
+ // X-Requested-With header
+ // For cross-domain requests, seeing as conditions for a preflight are
+ // akin to a jigsaw puzzle, we simply never set it to be sure.
+ // (it can always be set on a per-request basis or even using ajaxSetup)
+ // For same-domain requests, won't change header if already provided.
+ if ( !s.crossDomain && !headers["X-Requested-With"] ) {
+ headers["X-Requested-With"] = "XMLHttpRequest";
+ }
+
+ // Need an extra try/catch for cross domain requests in Firefox 3
+ try {
+ for ( i in headers ) {
+ xhr.setRequestHeader( i, headers[ i ] );
+ }
+ } catch( err ) {}
+
+ // Do send the request
+ // This may raise an exception which is actually
+ // handled in jQuery.ajax (so no try/catch here)
+ xhr.send( ( s.hasContent && s.data ) || null );
+
+ // Listener
+ callback = function( _, isAbort ) {
+ var status, responseHeaders, statusText, responses;
+
+ // Firefox throws exceptions when accessing properties
+ // of an xhr when a network error occurred
+ // http://helpful.knobs-dials.com/index.php/Component_returned_failure_code:_0x80040111_(NS_ERROR_NOT_AVAILABLE)
+ try {
+
+ // Was never called and is aborted or complete
+ if ( callback && ( isAbort || xhr.readyState === 4 ) ) {
+
+ // Only called once
+ callback = undefined;
+
+ // Do not keep as active anymore
+ if ( handle ) {
+ xhr.onreadystatechange = jQuery.noop;
+ if ( xhrOnUnloadAbort ) {
+ delete xhrCallbacks[ handle ];
+ }
+ }
+
+ // If it's an abort
+ if ( isAbort ) {
+ // Abort it manually if needed
+ if ( xhr.readyState !== 4 ) {
+ xhr.abort();
+ }
+ } else {
+ responses = {};
+ status = xhr.status;
+ responseHeaders = xhr.getAllResponseHeaders();
+
+ // When requesting binary data, IE6-9 will throw an exception
+ // on any attempt to access responseText (#11426)
+ if ( typeof xhr.responseText === "string" ) {
+ responses.text = xhr.responseText;
+ }
+
+ // Firefox throws an exception when accessing
+ // statusText for faulty cross-domain requests
+ try {
+ statusText = xhr.statusText;
+ } catch( e ) {
+ // We normalize with Webkit giving an empty statusText
+ statusText = "";
+ }
+
+ // Filter status for non standard behaviors
+
+ // If the request is local and we have data: assume a success
+ // (success with no data won't get notified, that's the best we
+ // can do given current implementations)
+ if ( !status && s.isLocal && !s.crossDomain ) {
+ status = responses.text ? 200 : 404;
+ // IE - #1450: sometimes returns 1223 when it should be 204
+ } else if ( status === 1223 ) {
+ status = 204;
+ }
+ }
+ }
+ } catch( firefoxAccessException ) {
+ if ( !isAbort ) {
+ complete( -1, firefoxAccessException );
+ }
+ }
+
+ // Call complete if needed
+ if ( responses ) {
+ complete( status, statusText, responses, responseHeaders );
+ }
+ };
+
+ if ( !s.async ) {
+ // if we're in sync mode we fire the callback
+ callback();
+ } else if ( xhr.readyState === 4 ) {
+ // (IE6 & IE7) if it's in cache and has been
+ // retrieved directly we need to fire the callback
+ setTimeout( callback );
+ } else {
+ handle = ++xhrId;
+ if ( xhrOnUnloadAbort ) {
+ // Create the active xhrs callbacks list if needed
+ // and attach the unload handler
+ if ( !xhrCallbacks ) {
+ xhrCallbacks = {};
+ jQuery( window ).unload( xhrOnUnloadAbort );
+ }
+ // Add to list of active xhrs callbacks
+ xhrCallbacks[ handle ] = callback;
+ }
+ xhr.onreadystatechange = callback;
+ }
+ },
+
+ abort: function() {
+ if ( callback ) {
+ callback( undefined, true );
+ }
+ }
+ };
+ }
+ });
+}
+var fxNow, timerId,
+ rfxtypes = /^(?:toggle|show|hide)$/,
+ rfxnum = new RegExp( "^(?:([+-])=|)(" + core_pnum + ")([a-z%]*)$", "i" ),
+ rrun = /queueHooks$/,
+ animationPrefilters = [ defaultPrefilter ],
+ tweeners = {
+ "*": [function( prop, value ) {
+ var end, unit,
+ tween = this.createTween( prop, value ),
+ parts = rfxnum.exec( value ),
+ target = tween.cur(),
+ start = +target || 0,
+ scale = 1,
+ maxIterations = 20;
+
+ if ( parts ) {
+ end = +parts[2];
+ unit = parts[3] || ( jQuery.cssNumber[ prop ] ? "" : "px" );
+
+ // We need to compute starting value
+ if ( unit !== "px" && start ) {
+ // Iteratively approximate from a nonzero starting point
+ // Prefer the current property, because this process will be trivial if it uses the same units
+ // Fallback to end or a simple constant
+ start = jQuery.css( tween.elem, prop, true ) || end || 1;
+
+ do {
+ // If previous iteration zeroed out, double until we get *something*
+ // Use a string for doubling factor so we don't accidentally see scale as unchanged below
+ scale = scale || ".5";
+
+ // Adjust and apply
+ start = start / scale;
+ jQuery.style( tween.elem, prop, start + unit );
+
+ // Update scale, tolerating zero or NaN from tween.cur()
+ // And breaking the loop if scale is unchanged or perfect, or if we've just had enough
+ } while ( scale !== (scale = tween.cur() / target) && scale !== 1 && --maxIterations );
+ }
+
+ tween.unit = unit;
+ tween.start = start;
+ // If a +=/-= token was provided, we're doing a relative animation
+ tween.end = parts[1] ? start + ( parts[1] + 1 ) * end : end;
+ }
+ return tween;
+ }]
+ };
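+
+// Usage sketch (selector and values are illustrative): the "*" tweener above is
+// what resolves relative "+=" / "-=" tokens and approximates non-px start values, e.g.
+//   jQuery( "#box" ).animate({ left: "+=50px", width: "2em" }, 400 );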
+
+// Animations created synchronously will run synchronously
+function createFxNow() {
+ setTimeout(function() {
+ fxNow = undefined;
+ });
+ return ( fxNow = jQuery.now() );
+}
+
+function createTweens( animation, props ) {
+ jQuery.each( props, function( prop, value ) {
+ var collection = ( tweeners[ prop ] || [] ).concat( tweeners[ "*" ] ),
+ index = 0,
+ length = collection.length;
+ for ( ; index < length; index++ ) {
+ if ( collection[ index ].call( animation, prop, value ) ) {
+
+ // we're done with this property
+ return;
+ }
+ }
+ });
+}
+
+function Animation( elem, properties, options ) {
+ var result,
+ stopped,
+ index = 0,
+ length = animationPrefilters.length,
+ deferred = jQuery.Deferred().always( function() {
+ // don't match elem in the :animated selector
+ delete tick.elem;
+ }),
+ tick = function() {
+ if ( stopped ) {
+ return false;
+ }
+ var currentTime = fxNow || createFxNow(),
+ remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ),
+ // archaic crash bug won't allow us to use 1 - ( 0.5 || 0 ) (#12497)
+ temp = remaining / animation.duration || 0,
+ percent = 1 - temp,
+ index = 0,
+ length = animation.tweens.length;
+
+ for ( ; index < length ; index++ ) {
+ animation.tweens[ index ].run( percent );
+ }
+
+ deferred.notifyWith( elem, [ animation, percent, remaining ]);
+
+ if ( percent < 1 && length ) {
+ return remaining;
+ } else {
+ deferred.resolveWith( elem, [ animation ] );
+ return false;
+ }
+ },
+ animation = deferred.promise({
+ elem: elem,
+ props: jQuery.extend( {}, properties ),
+ opts: jQuery.extend( true, { specialEasing: {} }, options ),
+ originalProperties: properties,
+ originalOptions: options,
+ startTime: fxNow || createFxNow(),
+ duration: options.duration,
+ tweens: [],
+ createTween: function( prop, end ) {
+ var tween = jQuery.Tween( elem, animation.opts, prop, end,
+ animation.opts.specialEasing[ prop ] || animation.opts.easing );
+ animation.tweens.push( tween );
+ return tween;
+ },
+ stop: function( gotoEnd ) {
+ var index = 0,
+ // if we are going to the end, we want to run all the tweens
+ // otherwise we skip this part
+ length = gotoEnd ? animation.tweens.length : 0;
+ if ( stopped ) {
+ return this;
+ }
+ stopped = true;
+ for ( ; index < length ; index++ ) {
+ animation.tweens[ index ].run( 1 );
+ }
+
+ // resolve when we played the last frame
+ // otherwise, reject
+ if ( gotoEnd ) {
+ deferred.resolveWith( elem, [ animation, gotoEnd ] );
+ } else {
+ deferred.rejectWith( elem, [ animation, gotoEnd ] );
+ }
+ return this;
+ }
+ }),
+ props = animation.props;
+
+ propFilter( props, animation.opts.specialEasing );
+
+ for ( ; index < length ; index++ ) {
+ result = animationPrefilters[ index ].call( animation, elem, props, animation.opts );
+ if ( result ) {
+ return result;
+ }
+ }
+
+ createTweens( animation, props );
+
+ if ( jQuery.isFunction( animation.opts.start ) ) {
+ animation.opts.start.call( elem, animation );
+ }
+
+ jQuery.fx.timer(
+ jQuery.extend( tick, {
+ elem: elem,
+ anim: animation,
+ queue: animation.opts.queue
+ })
+ );
+
+ // attach callbacks from options
+ return animation.progress( animation.opts.progress )
+ .done( animation.opts.done, animation.opts.complete )
+ .fail( animation.opts.fail )
+ .always( animation.opts.always );
+}
+
+function propFilter( props, specialEasing ) {
+ var value, name, index, easing, hooks;
+
+ // camelCase, specialEasing and expand cssHook pass
+ for ( index in props ) {
+ name = jQuery.camelCase( index );
+ easing = specialEasing[ name ];
+ value = props[ index ];
+ if ( jQuery.isArray( value ) ) {
+ easing = value[ 1 ];
+ value = props[ index ] = value[ 0 ];
+ }
+
+ if ( index !== name ) {
+ props[ name ] = value;
+ delete props[ index ];
+ }
+
+ hooks = jQuery.cssHooks[ name ];
+ if ( hooks && "expand" in hooks ) {
+ value = hooks.expand( value );
+ delete props[ name ];
+
+ // Not quite $.extend; this won't overwrite keys already present.
+ // Also, reusing 'index' from above because we have the correct "name"
+ for ( index in value ) {
+ if ( !( index in props ) ) {
+ props[ index ] = value[ index ];
+ specialEasing[ index ] = easing;
+ }
+ }
+ } else {
+ specialEasing[ name ] = easing;
+ }
+ }
+}
+
+jQuery.Animation = jQuery.extend( Animation, {
+
+ tweener: function( props, callback ) {
+ if ( jQuery.isFunction( props ) ) {
+ callback = props;
+ props = [ "*" ];
+ } else {
+ props = props.split(" ");
+ }
+
+ var prop,
+ index = 0,
+ length = props.length;
+
+ for ( ; index < length ; index++ ) {
+ prop = props[ index ];
+ tweeners[ prop ] = tweeners[ prop ] || [];
+ tweeners[ prop ].unshift( callback );
+ }
+ },
+
+ prefilter: function( callback, prepend ) {
+ if ( prepend ) {
+ animationPrefilters.unshift( callback );
+ } else {
+ animationPrefilters.push( callback );
+ }
+ }
+});
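+
+// Usage sketch (the property name and logic are hypothetical): plugins can register
+// per-property tweeners or animation prefilters through the hooks above, e.g.
+//   jQuery.Animation.tweener( "borderWidth", function( prop, value ) {
+//     return this.createTween( prop, value );
+//   });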
+
+function defaultPrefilter( elem, props, opts ) {
+ /*jshint validthis:true */
+ var prop, index, length,
+ value, dataShow, toggle,
+ tween, hooks, oldfire,
+ anim = this,
+ style = elem.style,
+ orig = {},
+ handled = [],
+ hidden = elem.nodeType && isHidden( elem );
+
+ // handle queue: false promises
+ if ( !opts.queue ) {
+ hooks = jQuery._queueHooks( elem, "fx" );
+ if ( hooks.unqueued == null ) {
+ hooks.unqueued = 0;
+ oldfire = hooks.empty.fire;
+ hooks.empty.fire = function() {
+ if ( !hooks.unqueued ) {
+ oldfire();
+ }
+ };
+ }
+ hooks.unqueued++;
+
+ anim.always(function() {
+ // doing this makes sure that the complete handler will be called
+ // before this completes
+ anim.always(function() {
+ hooks.unqueued--;
+ if ( !jQuery.queue( elem, "fx" ).length ) {
+ hooks.empty.fire();
+ }
+ });
+ });
+ }
+
+ // height/width overflow pass
+ if ( elem.nodeType === 1 && ( "height" in props || "width" in props ) ) {
+ // Make sure that nothing sneaks out
+ // Record all 3 overflow attributes because IE does not
+ // change the overflow attribute when overflowX and
+ // overflowY are set to the same value
+ opts.overflow = [ style.overflow, style.overflowX, style.overflowY ];
+
+ // Set display property to inline-block for height/width
+ // animations on inline elements that are having width/height animated
+ if ( jQuery.css( elem, "display" ) === "inline" &&
+ jQuery.css( elem, "float" ) === "none" ) {
+
+ // inline-level elements accept inline-block;
+ // block-level elements need to be inline with layout
+ if ( !jQuery.support.inlineBlockNeedsLayout || css_defaultDisplay( elem.nodeName ) === "inline" ) {
+ style.display = "inline-block";
+
+ } else {
+ style.zoom = 1;
+ }
+ }
+ }
+
+ if ( opts.overflow ) {
+ style.overflow = "hidden";
+ if ( !jQuery.support.shrinkWrapBlocks ) {
+ anim.always(function() {
+ style.overflow = opts.overflow[ 0 ];
+ style.overflowX = opts.overflow[ 1 ];
+ style.overflowY = opts.overflow[ 2 ];
+ });
+ }
+ }
+
+
+ // show/hide pass
+ for ( index in props ) {
+ value = props[ index ];
+ if ( rfxtypes.exec( value ) ) {
+ delete props[ index ];
+ toggle = toggle || value === "toggle";
+ if ( value === ( hidden ? "hide" : "show" ) ) {
+ continue;
+ }
+ handled.push( index );
+ }
+ }
+
+ length = handled.length;
+ if ( length ) {
+ dataShow = jQuery._data( elem, "fxshow" ) || jQuery._data( elem, "fxshow", {} );
+ if ( "hidden" in dataShow ) {
+ hidden = dataShow.hidden;
+ }
+
+ // Store state if it's a toggle - enables .stop().toggle() to "reverse"
+ if ( toggle ) {
+ dataShow.hidden = !hidden;
+ }
+ if ( hidden ) {
+ jQuery( elem ).show();
+ } else {
+ anim.done(function() {
+ jQuery( elem ).hide();
+ });
+ }
+ anim.done(function() {
+ var prop;
+ jQuery._removeData( elem, "fxshow" );
+ for ( prop in orig ) {
+ jQuery.style( elem, prop, orig[ prop ] );
+ }
+ });
+ for ( index = 0 ; index < length ; index++ ) {
+ prop = handled[ index ];
+ tween = anim.createTween( prop, hidden ? dataShow[ prop ] : 0 );
+ orig[ prop ] = dataShow[ prop ] || jQuery.style( elem, prop );
+
+ if ( !( prop in dataShow ) ) {
+ dataShow[ prop ] = tween.start;
+ if ( hidden ) {
+ tween.end = tween.start;
+ tween.start = prop === "width" || prop === "height" ? 1 : 0;
+ }
+ }
+ }
+ }
+}
+
+function Tween( elem, options, prop, end, easing ) {
+ return new Tween.prototype.init( elem, options, prop, end, easing );
+}
+jQuery.Tween = Tween;
+
+Tween.prototype = {
+ constructor: Tween,
+ init: function( elem, options, prop, end, easing, unit ) {
+ this.elem = elem;
+ this.prop = prop;
+ this.easing = easing || "swing";
+ this.options = options;
+ this.start = this.now = this.cur();
+ this.end = end;
+ this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" );
+ },
+ cur: function() {
+ var hooks = Tween.propHooks[ this.prop ];
+
+ return hooks && hooks.get ?
+ hooks.get( this ) :
+ Tween.propHooks._default.get( this );
+ },
+ run: function( percent ) {
+ var eased,
+ hooks = Tween.propHooks[ this.prop ];
+
+ if ( this.options.duration ) {
+ this.pos = eased = jQuery.easing[ this.easing ](
+ percent, this.options.duration * percent, 0, 1, this.options.duration
+ );
+ } else {
+ this.pos = eased = percent;
+ }
+ this.now = ( this.end - this.start ) * eased + this.start;
+
+ if ( this.options.step ) {
+ this.options.step.call( this.elem, this.now, this );
+ }
+
+ if ( hooks && hooks.set ) {
+ hooks.set( this );
+ } else {
+ Tween.propHooks._default.set( this );
+ }
+ return this;
+ }
+};
+
+Tween.prototype.init.prototype = Tween.prototype;
+
+Tween.propHooks = {
+ _default: {
+ get: function( tween ) {
+ var result;
+
+ if ( tween.elem[ tween.prop ] != null &&
+ (!tween.elem.style || tween.elem.style[ tween.prop ] == null) ) {
+ return tween.elem[ tween.prop ];
+ }
+
+ // passing an empty string as a 3rd parameter to .css will automatically
+ // attempt a parseFloat and fallback to a string if the parse fails
+ // so, simple values such as "10px" are parsed to Float.
+ // complex values such as "rotate(1rad)" are returned as is.
+ result = jQuery.css( tween.elem, tween.prop, "" );
+ // Empty strings, null, undefined and "auto" are converted to 0.
+ return !result || result === "auto" ? 0 : result;
+ },
+ set: function( tween ) {
+ // Use the step hook for back compat, the cssHook if it's there, .style if it's
+ // available, and plain properties where available
+ if ( jQuery.fx.step[ tween.prop ] ) {
+ jQuery.fx.step[ tween.prop ]( tween );
+ } else if ( tween.elem.style && ( tween.elem.style[ jQuery.cssProps[ tween.prop ] ] != null || jQuery.cssHooks[ tween.prop ] ) ) {
+ jQuery.style( tween.elem, tween.prop, tween.now + tween.unit );
+ } else {
+ tween.elem[ tween.prop ] = tween.now;
+ }
+ }
+ }
+};
+
+// Remove in 2.0 - this supports IE8's panic-based approach
+// to setting things on disconnected nodes
+
+Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = {
+ set: function( tween ) {
+ if ( tween.elem.nodeType && tween.elem.parentNode ) {
+ tween.elem[ tween.prop ] = tween.now;
+ }
+ }
+};
+
+jQuery.each([ "toggle", "show", "hide" ], function( i, name ) {
+ var cssFn = jQuery.fn[ name ];
+ jQuery.fn[ name ] = function( speed, easing, callback ) {
+ return speed == null || typeof speed === "boolean" ?
+ cssFn.apply( this, arguments ) :
+ this.animate( genFx( name, true ), speed, easing, callback );
+ };
+});
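+
+// Usage sketch (selector is illustrative): with a speed argument these now
+// animate instead of toggling instantly, e.g.
+//   jQuery( "#panel" ).hide( "slow" );
+//   jQuery( "#panel" ).toggle( 200, "linear" );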
+
+jQuery.fn.extend({
+ fadeTo: function( speed, to, easing, callback ) {
+
+ // show any hidden elements after setting opacity to 0
+ return this.filter( isHidden ).css( "opacity", 0 ).show()
+
+ // animate to the value specified
+ .end().animate({ opacity: to }, speed, easing, callback );
+ },
+ animate: function( prop, speed, easing, callback ) {
+ var empty = jQuery.isEmptyObject( prop ),
+ optall = jQuery.speed( speed, easing, callback ),
+ doAnimation = function() {
+ // Operate on a copy of prop so per-property easing won't be lost
+ var anim = Animation( this, jQuery.extend( {}, prop ), optall );
+ doAnimation.finish = function() {
+ anim.stop( true );
+ };
+ // Empty animations, or finishing resolves immediately
+ if ( empty || jQuery._data( this, "finish" ) ) {
+ anim.stop( true );
+ }
+ };
+ doAnimation.finish = doAnimation;
+
+ return empty || optall.queue === false ?
+ this.each( doAnimation ) :
+ this.queue( optall.queue, doAnimation );
+ },
+ stop: function( type, clearQueue, gotoEnd ) {
+ var stopQueue = function( hooks ) {
+ var stop = hooks.stop;
+ delete hooks.stop;
+ stop( gotoEnd );
+ };
+
+ if ( typeof type !== "string" ) {
+ gotoEnd = clearQueue;
+ clearQueue = type;
+ type = undefined;
+ }
+ if ( clearQueue && type !== false ) {
+ this.queue( type || "fx", [] );
+ }
+
+ return this.each(function() {
+ var dequeue = true,
+ index = type != null && type + "queueHooks",
+ timers = jQuery.timers,
+ data = jQuery._data( this );
+
+ if ( index ) {
+ if ( data[ index ] && data[ index ].stop ) {
+ stopQueue( data[ index ] );
+ }
+ } else {
+ for ( index in data ) {
+ if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) {
+ stopQueue( data[ index ] );
+ }
+ }
+ }
+
+ for ( index = timers.length; index--; ) {
+ if ( timers[ index ].elem === this && (type == null || timers[ index ].queue === type) ) {
+ timers[ index ].anim.stop( gotoEnd );
+ dequeue = false;
+ timers.splice( index, 1 );
+ }
+ }
+
+ // start the next in the queue if the last step wasn't forced
+ // timers currently will call their complete callbacks, which will dequeue
+ // but only if they were gotoEnd
+ if ( dequeue || !gotoEnd ) {
+ jQuery.dequeue( this, type );
+ }
+ });
+ },
+ finish: function( type ) {
+ if ( type !== false ) {
+ type = type || "fx";
+ }
+ return this.each(function() {
+ var index,
+ data = jQuery._data( this ),
+ queue = data[ type + "queue" ],
+ hooks = data[ type + "queueHooks" ],
+ timers = jQuery.timers,
+ length = queue ? queue.length : 0;
+
+ // enable finishing flag on private data
+ data.finish = true;
+
+ // empty the queue first
+ jQuery.queue( this, type, [] );
+
+ if ( hooks && hooks.cur && hooks.cur.finish ) {
+ hooks.cur.finish.call( this );
+ }
+
+ // look for any active animations, and finish them
+ for ( index = timers.length; index--; ) {
+ if ( timers[ index ].elem === this && timers[ index ].queue === type ) {
+ timers[ index ].anim.stop( true );
+ timers.splice( index, 1 );
+ }
+ }
+
+ // look for any animations in the old queue and finish them
+ for ( index = 0; index < length; index++ ) {
+ if ( queue[ index ] && queue[ index ].finish ) {
+ queue[ index ].finish.call( this );
+ }
+ }
+
+ // turn off finishing flag
+ delete data.finish;
+ });
+ }
+});
+
+// Generate parameters to create a standard animation
+function genFx( type, includeWidth ) {
+ var which,
+ attrs = { height: type },
+ i = 0;
+
+ // if we include width, step value is 1 to do all cssExpand values,
+ // if we don't include width, step value is 2 to skip over Left and Right
+ includeWidth = includeWidth? 1 : 0;
+ for( ; i < 4 ; i += 2 - includeWidth ) {
+ which = cssExpand[ i ];
+ attrs[ "margin" + which ] = attrs[ "padding" + which ] = type;
+ }
+
+ if ( includeWidth ) {
+ attrs.opacity = attrs.width = type;
+ }
+
+ return attrs;
+}
+
+// Generate shortcuts for custom animations
+jQuery.each({
+ slideDown: genFx("show"),
+ slideUp: genFx("hide"),
+ slideToggle: genFx("toggle"),
+ fadeIn: { opacity: "show" },
+ fadeOut: { opacity: "hide" },
+ fadeToggle: { opacity: "toggle" }
+}, function( name, props ) {
+ jQuery.fn[ name ] = function( speed, easing, callback ) {
+ return this.animate( props, speed, easing, callback );
+ };
+});
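+
+// Usage sketch (selector and callback are illustrative): each generated shortcut
+// forwards to .animate() with the property map built by genFx(), e.g.
+//   jQuery( "#menu" ).slideDown( 300, function() {
+//     jQuery( this ).addClass( "open" );
+//   });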
+
+jQuery.speed = function( speed, easing, fn ) {
+ var opt = speed && typeof speed === "object" ? jQuery.extend( {}, speed ) : {
+ complete: fn || !fn && easing ||
+ jQuery.isFunction( speed ) && speed,
+ duration: speed,
+ easing: fn && easing || easing && !jQuery.isFunction( easing ) && easing
+ };
+
+ opt.duration = jQuery.fx.off ? 0 : typeof opt.duration === "number" ? opt.duration :
+ opt.duration in jQuery.fx.speeds ? jQuery.fx.speeds[ opt.duration ] : jQuery.fx.speeds._default;
+
+ // normalize opt.queue - true/undefined/null -> "fx"
+ if ( opt.queue == null || opt.queue === true ) {
+ opt.queue = "fx";
+ }
+
+ // Queueing
+ opt.old = opt.complete;
+
+ opt.complete = function() {
+ if ( jQuery.isFunction( opt.old ) ) {
+ opt.old.call( this );
+ }
+
+ if ( opt.queue ) {
+ jQuery.dequeue( this, opt.queue );
+ }
+ };
+
+ return opt;
+};
+
+jQuery.easing = {
+ linear: function( p ) {
+ return p;
+ },
+ swing: function( p ) {
+ return 0.5 - Math.cos( p*Math.PI ) / 2;
+ }
+};
+
+jQuery.timers = [];
+jQuery.fx = Tween.prototype.init;
+jQuery.fx.tick = function() {
+ var timer,
+ timers = jQuery.timers,
+ i = 0;
+
+ fxNow = jQuery.now();
+
+ for ( ; i < timers.length; i++ ) {
+ timer = timers[ i ];
+ // Checks the timer has not already been removed
+ if ( !timer() && timers[ i ] === timer ) {
+ timers.splice( i--, 1 );
+ }
+ }
+
+ if ( !timers.length ) {
+ jQuery.fx.stop();
+ }
+ fxNow = undefined;
+};
+
+jQuery.fx.timer = function( timer ) {
+ if ( timer() && jQuery.timers.push( timer ) ) {
+ jQuery.fx.start();
+ }
+};
+
+jQuery.fx.interval = 13;
+
+jQuery.fx.start = function() {
+ if ( !timerId ) {
+ timerId = setInterval( jQuery.fx.tick, jQuery.fx.interval );
+ }
+};
+
+jQuery.fx.stop = function() {
+ clearInterval( timerId );
+ timerId = null;
+};
+
+jQuery.fx.speeds = {
+ slow: 600,
+ fast: 200,
+ // Default speed
+ _default: 400
+};
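+
+// Usage sketch (the "glacial" name is hypothetical): named durations can be
+// extended, and the new keyword then works anywhere a speed is accepted, e.g.
+//   jQuery.fx.speeds.glacial = 2000;
+//   jQuery( "#box" ).fadeOut( "glacial" );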
+
+// Back Compat <1.8 extension point
+jQuery.fx.step = {};
+
+if ( jQuery.expr && jQuery.expr.filters ) {
+ jQuery.expr.filters.animated = function( elem ) {
+ return jQuery.grep(jQuery.timers, function( fn ) {
+ return elem === fn.elem;
+ }).length;
+ };
+}
+jQuery.fn.offset = function( options ) {
+ if ( arguments.length ) {
+ return options === undefined ?
+ this :
+ this.each(function( i ) {
+ jQuery.offset.setOffset( this, options, i );
+ });
+ }
+
+ var docElem, win,
+ box = { top: 0, left: 0 },
+ elem = this[ 0 ],
+ doc = elem && elem.ownerDocument;
+
+ if ( !doc ) {
+ return;
+ }
+
+ docElem = doc.documentElement;
+
+ // Make sure it's not a disconnected DOM node
+ if ( !jQuery.contains( docElem, elem ) ) {
+ return box;
+ }
+
+ // If we don't have gBCR, just use 0,0 rather than error
+ // BlackBerry 5, iOS 3 (original iPhone)
+ if ( typeof elem.getBoundingClientRect !== core_strundefined ) {
+ box = elem.getBoundingClientRect();
+ }
+ win = getWindow( doc );
+ return {
+ top: box.top + ( win.pageYOffset || docElem.scrollTop ) - ( docElem.clientTop || 0 ),
+ left: box.left + ( win.pageXOffset || docElem.scrollLeft ) - ( docElem.clientLeft || 0 )
+ };
+};
+
+jQuery.offset = {
+
+ setOffset: function( elem, options, i ) {
+ var position = jQuery.css( elem, "position" );
+
+ // Set position first, in case top/left are set even on a static elem
+ if ( position === "static" ) {
+ elem.style.position = "relative";
+ }
+
+ var curElem = jQuery( elem ),
+ curOffset = curElem.offset(),
+ curCSSTop = jQuery.css( elem, "top" ),
+ curCSSLeft = jQuery.css( elem, "left" ),
+ calculatePosition = ( position === "absolute" || position === "fixed" ) && jQuery.inArray("auto", [curCSSTop, curCSSLeft]) > -1,
+ props = {}, curPosition = {}, curTop, curLeft;
+
+ // need to be able to calculate position if either top or left is auto and position is either absolute or fixed
+ if ( calculatePosition ) {
+ curPosition = curElem.position();
+ curTop = curPosition.top;
+ curLeft = curPosition.left;
+ } else {
+ curTop = parseFloat( curCSSTop ) || 0;
+ curLeft = parseFloat( curCSSLeft ) || 0;
+ }
+
+ if ( jQuery.isFunction( options ) ) {
+ options = options.call( elem, i, curOffset );
+ }
+
+ if ( options.top != null ) {
+ props.top = ( options.top - curOffset.top ) + curTop;
+ }
+ if ( options.left != null ) {
+ props.left = ( options.left - curOffset.left ) + curLeft;
+ }
+
+ if ( "using" in options ) {
+ options.using.call( elem, props );
+ } else {
+ curElem.css( props );
+ }
+ }
+};
+
+
+jQuery.fn.extend({
+
+ position: function() {
+ if ( !this[ 0 ] ) {
+ return;
+ }
+
+ var offsetParent, offset,
+ parentOffset = { top: 0, left: 0 },
+ elem = this[ 0 ];
+
+ // Fixed elements are offset from the window (parentOffset = { top: 0, left: 0 }) because the window is their only offset parent
+ if ( jQuery.css( elem, "position" ) === "fixed" ) {
+ // we assume that getBoundingClientRect is available when computed position is fixed
+ offset = elem.getBoundingClientRect();
+ } else {
+ // Get *real* offsetParent
+ offsetParent = this.offsetParent();
+
+ // Get correct offsets
+ offset = this.offset();
+ if ( !jQuery.nodeName( offsetParent[ 0 ], "html" ) ) {
+ parentOffset = offsetParent.offset();
+ }
+
+ // Add offsetParent borders
+ parentOffset.top += jQuery.css( offsetParent[ 0 ], "borderTopWidth", true );
+ parentOffset.left += jQuery.css( offsetParent[ 0 ], "borderLeftWidth", true );
+ }
+
+ // Subtract parent offsets and element margins
+ // note: when an element has margin: auto the offsetLeft and marginLeft
+ // are the same in Safari causing offset.left to incorrectly be 0
+ return {
+ top: offset.top - parentOffset.top - jQuery.css( elem, "marginTop", true ),
+ left: offset.left - parentOffset.left - jQuery.css( elem, "marginLeft", true)
+ };
+ },
+
+ offsetParent: function() {
+ return this.map(function() {
+ var offsetParent = this.offsetParent || document.documentElement;
+ while ( offsetParent && ( !jQuery.nodeName( offsetParent, "html" ) && jQuery.css( offsetParent, "position") === "static" ) ) {
+ offsetParent = offsetParent.offsetParent;
+ }
+ return offsetParent || document.documentElement;
+ });
+ }
+});
+
+
+// Create scrollLeft and scrollTop methods
+jQuery.each( {scrollLeft: "pageXOffset", scrollTop: "pageYOffset"}, function( method, prop ) {
+ var top = /Y/.test( prop );
+
+ jQuery.fn[ method ] = function( val ) {
+ return jQuery.access( this, function( elem, method, val ) {
+ var win = getWindow( elem );
+
+ if ( val === undefined ) {
+ return win ? (prop in win) ? win[ prop ] :
+ win.document.documentElement[ method ] :
+ elem[ method ];
+ }
+
+ if ( win ) {
+ win.scrollTo(
+ !top ? val : jQuery( win ).scrollLeft(),
+ top ? val : jQuery( win ).scrollTop()
+ );
+
+ } else {
+ elem[ method ] = val;
+ }
+ }, method, val, arguments.length, null );
+ };
+});
+
+function getWindow( elem ) {
+ return jQuery.isWindow( elem ) ?
+ elem :
+ elem.nodeType === 9 ?
+ elem.defaultView || elem.parentWindow :
+ false;
+}
+// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods
+jQuery.each( { Height: "height", Width: "width" }, function( name, type ) {
+ jQuery.each( { padding: "inner" + name, content: type, "": "outer" + name }, function( defaultExtra, funcName ) {
+ // margin is only for outerHeight, outerWidth
+ jQuery.fn[ funcName ] = function( margin, value ) {
+ var chainable = arguments.length && ( defaultExtra || typeof margin !== "boolean" ),
+ extra = defaultExtra || ( margin === true || value === true ? "margin" : "border" );
+
+ return jQuery.access( this, function( elem, type, value ) {
+ var doc;
+
+ if ( jQuery.isWindow( elem ) ) {
+ // As of 5/8/2012 this will yield incorrect results for Mobile Safari, but there
+ // isn't a whole lot we can do. See pull request at this URL for discussion:
+ // https://github.com/jquery/jquery/pull/764
+ return elem.document.documentElement[ "client" + name ];
+ }
+
+ // Get document width or height
+ if ( elem.nodeType === 9 ) {
+ doc = elem.documentElement;
+
+ // Either scroll[Width/Height] or offset[Width/Height] or client[Width/Height], whichever is greatest
+ // unfortunately, this causes bug #3838 in IE6/8 only, but there is currently no good, small way to fix it.
+ return Math.max(
+ elem.body[ "scroll" + name ], doc[ "scroll" + name ],
+ elem.body[ "offset" + name ], doc[ "offset" + name ],
+ doc[ "client" + name ]
+ );
+ }
+
+ return value === undefined ?
+ // Get width or height on the element, requesting but not forcing parseFloat
+ jQuery.css( elem, type, extra ) :
+
+ // Set width or height on the element
+ jQuery.style( elem, type, value, extra );
+ }, type, chainable ? margin : undefined, chainable, null );
+ };
+ });
+});
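+
+// Usage sketch (selector is illustrative): the generated methods differ only in
+// the box edges they include, e.g. for the same element
+//   jQuery( "#box" ).width();            // content box
+//   jQuery( "#box" ).innerWidth();       // content + padding
+//   jQuery( "#box" ).outerWidth( true ); // content + padding + border + margin
+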
+// Limit scope pollution from any deprecated API
+// (function() {
+
+// })();
+// Expose jQuery to the global object
+window.jQuery = window.$ = jQuery;
+
+// Expose jQuery as an AMD module, but only for AMD loaders that
+// understand the issues with loading multiple versions of jQuery
+// in a page that all might call define(). The loader will indicate
+// they have special allowances for multiple jQuery versions by
+// specifying define.amd.jQuery = true. Register as a named module,
+// since jQuery can be concatenated with other files that may use define,
+// but not use a proper concatenation script that understands anonymous
+// AMD modules. A named AMD module is the safest and most robust way to register.
+// Lowercase jquery is used because AMD module names are derived from
+// file names, and jQuery is normally delivered in a lowercase file name.
+// Do this after creating the global so that if an AMD module wants to call
+// noConflict to hide this version of jQuery, it will work.
+if ( typeof define === "function" && define.amd && define.amd.jQuery ) {
+ define( "jquery", [], function () { return jQuery; } );
+}
+
+})( window );
diff --git a/pype/premiere/extensions/com.pype.avalon/js/json2.js b/pype/premiere/extensions/com.pype/lib/json2.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/js/json2.js
rename to pype/premiere/extensions/com.pype/lib/json2.js
diff --git a/pype/premiere/extensions/com.pype/package.json b/pype/premiere/extensions/com.pype/package.json
new file mode 100644
index 0000000000..fa61712c59
--- /dev/null
+++ b/pype/premiere/extensions/com.pype/package.json
@@ -0,0 +1,30 @@
+{
+ "name": "com.pype",
+ "version": "1.0.0",
+ "description": "pype avalon integration",
+ "license": "ISC",
+ "main": "CSXS\\manifest.xml",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "dependencies": {
+ "bluebird": "^3.7.2",
+ "decompress-zip": "^0.2.2",
+ "fs": "^0.0.1-security",
+ "jsonfile": "^6.0.1",
+ "junk": "^3.1.0",
+ "mkdirp": "^1.0.4",
+ "node-fetch": "^2.6.0",
+ "node-timecodes": "^2.5.0",
+ "opn": "^6.0.0",
+ "os": "^0.1.1",
+ "path": "^0.12.7",
+ "process": "^0.11.10",
+ "pure-uuid": "^1.6.0",
+ "rimraf": "^3.0.2",
+ "url": "^0.11.0",
+ "walk": "^2.3.14",
+ "xml2js": "^0.4.23"
+ },
+ "devDependencies": {}
+}
diff --git a/pype/premiere/extensions/com.pype/pypeApp.jsx b/pype/premiere/extensions/com.pype/pypeApp.jsx
new file mode 100644
index 0000000000..07a1aa9492
--- /dev/null
+++ b/pype/premiere/extensions/com.pype/pypeApp.jsx
@@ -0,0 +1,15 @@
+/* global $, File, Folder, alert */
+
+if (typeof ($) === 'undefined') {
+ var $ = {};
+}
+
+if (typeof (app) === 'undefined') {
+ var app = {};
+}
+
+function keepExtension () {
+  return app.setExtensionPersistent('com.pype', 0);
+}
+
+keepExtension();
diff --git a/pype/premiere/lib.py b/pype/premiere/lib.py
new file mode 100644
index 0000000000..f03c98fb78
--- /dev/null
+++ b/pype/premiere/lib.py
@@ -0,0 +1,195 @@
+import os
+import sys
+import shutil
+import json
+from pysync import walktree
+import requests
+
+from avalon import api
+from pype.widgets.message_window import message
+from pypeapp import Logger
+
+
+log = Logger().get_logger(__name__, "premiere")
+
+self = sys.modules[__name__]
+self._has_been_setup = False
+self._registered_gui = None
+
+AVALON_CONFIG = os.environ["AVALON_CONFIG"]
+
+PARENT_DIR = os.path.dirname(__file__)
+PACKAGE_DIR = os.path.dirname(PARENT_DIR)
+PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
+self.EXTENSIONS_PATH_REMOTE = os.path.join(PARENT_DIR, "extensions")
+self.EXTENSIONS_PATH_LOCAL = None
+self.EXTENSIONS_CACHE_PATH = None
+
+self.LOAD_PATH = os.path.join(PLUGINS_DIR, "premiere", "load")
+self.CREATE_PATH = os.path.join(PLUGINS_DIR, "premiere", "create")
+self.INVENTORY_PATH = os.path.join(PLUGINS_DIR, "premiere", "inventory")
+
+self.PUBLISH_PATH = os.path.join(
+ PLUGINS_DIR, "premiere", "publish"
+).replace("\\", "/")
+
+if os.getenv("PUBLISH_PATH", None):
+ if self.PUBLISH_PATH not in os.environ["PUBLISH_PATH"]:
+ os.environ["PUBLISH_PATH"] = os.pathsep.join(
+ os.environ["PUBLISH_PATH"].split(os.pathsep) +
+ [self.PUBLISH_PATH]
+ )
+else:
+ os.environ["PUBLISH_PATH"] = self.PUBLISH_PATH
+
+_clearing_cache = ["com.pype", "com.pype.rename"]
+
+
+def ls():
+ pass
+
+
+def reload_pipeline():
+ """Attempt to reload pipeline at run-time.
+
+ CAUTION: This is primarily for development and debugging purposes.
+
+ """
+
+ import importlib
+ import pype.premiere
+
+ api.uninstall()
+
+ for module in ("avalon.io",
+ "avalon.lib",
+ "avalon.pipeline",
+ "avalon.api",
+ "avalon.tools",
+
+ "{}".format(AVALON_CONFIG),
+ "{}.premiere".format(AVALON_CONFIG),
+ "{}.premiere.lib".format(AVALON_CONFIG)
+ ):
+ log.info("Reloading module: {}...".format(module))
+ try:
+ module = importlib.import_module(module)
+ importlib.reload(module)
+ except Exception as e:
+ log.warning("Cannot reload module: {}".format(e))
+
+ api.install(pype.premiere)
+
+
+def setup(env=None):
+ """ Running wrapper
+ """
+ if not env:
+ env = os.environ
+
+ self.EXTENSIONS_PATH_LOCAL = env["EXTENSIONS_PATH"]
+ self.EXTENSIONS_CACHE_PATH = env["EXTENSIONS_CACHE_PATH"]
+
+ log.info("Registering Adobe Premiere plug-ins..")
+ if not test_rest_api_server(env):
+ return
+
+ if not env.get("installed_zxp"):
+ # remove cep_cache from user temp dir
+ clearing_caches_ui()
+
+ # synchronize extensions
+ extensions_sync()
+ else:
+ log.info("Extensions installed as `.zxp`...")
+
+ log.info("Premiere Pype wrapper has been installed")
+
+
+def extensions_sync():
+ # TODO(antirotor): Bundle extension and install it
+ # We need to bundle the extension because we are using third-party
+ # node_modules. To ease creation of the bundle, let's create a build script
+ # that creates a self-signed certificate and bundles the extension to zxp
+ # format (using ZXPSignCmd from Adobe). If we find a zxp in the extension
+ # directory, we can install it via the command line `ExManCmd /install`
+ # (using Adobe Extension Manager). If no zxp is found, we keep the old
+ # behaviour and just copy all files. Thus we maintain the ability to
+ # develop and deploy at the same time.
+ #
+ # sources:
+ # https://helpx.adobe.com/extension-manager/using/command-line.html
+
+ process_pairs = list()
+ # get extensions dir in pype.premiere.extensions
+ # build dir path to premiere cep extensions
+
+ for name in os.listdir(self.EXTENSIONS_PATH_REMOTE):
+ log.debug("> name: {}".format(name))
+ src = os.path.join(self.EXTENSIONS_PATH_REMOTE, name)
+ dst = os.path.join(self.EXTENSIONS_PATH_LOCAL, name)
+ process_pairs.append((name, src, dst))
+
+ # synchronize all extensions
+ for name, src, dst in process_pairs:
+ if not os.path.isdir(src):
+ continue
+ if name not in _clearing_cache:
+ continue
+ if not os.path.exists(dst):
+ os.makedirs(dst, mode=0o777)
+ walktree(source=src, target=dst, options_input=["y", ">"])
+ log.info("Extension {0} from `{1}` copied to `{2}`".format(
+ name, src, dst
+ ))
+ # time.sleep(10)
+ return
+
+
+def clearing_caches_ui():
+ '''Make sure there is no outdated content in the cep_cache dir
+ before every start of Premiere.'''
+
+ if not os.path.isdir(self.EXTENSIONS_CACHE_PATH):
+ os.makedirs(self.EXTENSIONS_CACHE_PATH, mode=0o777)
+ log.info("Created dir: {}".format(self.EXTENSIONS_CACHE_PATH))
+
+ for d in os.listdir(self.EXTENSIONS_CACHE_PATH):
+ match = [p for p in _clearing_cache
+ if str(p) in d]
+
+ if match:
+ try:
+ path = os.path.normpath(
+ os.path.join(self.EXTENSIONS_CACHE_PATH, d))
+ log.info("Removing dir: {}".format(path))
+ shutil.rmtree(path, ignore_errors=True)
+ except Exception as e:
+ log.error("problem: {}".format(e))
+
+
+def test_rest_api_server(env):
+ # from pprint import pformat
+ rest_url = env.get("PYPE_REST_API_URL")
+ project_name = "{AVALON_PROJECT}".format(**env)
+ URL = "/".join((rest_url,
+ "avalon/projects",
+ project_name))
+ log.debug("__ URL: {}".format(URL))
+ try:
+ req = requests.get(URL, data={}).text
+ req_json = json.loads(req)
+ # log.debug("_ req_json: {}".format(pformat(req_json)))
+ log.debug("__ projectName: {}".format(req_json["data"]["name"]))
+ assert req_json["data"]["name"] == project_name, (
+ "Project data from Rest API server not correct")
+ return True
+
+ except Exception as e:
+ message(title="Pype Rest API static server is not running ",
+ message=("Before you can run Premiere, make sure "
+ "the system Tray Pype icon is running and "
+ "submenu `service` with name `Rest API` is "
+ "with green icon."
+ "\n Error: {}".format(e)),
+ level="critical")
diff --git a/pype/premiere/ppro/css/avalon.min.css b/pype/premiere/ppro/css/avalon.min.css
new file mode 100644
index 0000000000..03471edfd3
--- /dev/null
+++ b/pype/premiere/ppro/css/avalon.min.css
@@ -0,0 +1,28 @@
+body {
+ background-color: #323238;
+ color: #eeeeee
+}
+
+#output {
+ background: #121212;
+ color: #eeeeee;
+ padding: 2em;
+ font-family: monospace;
+ font-weight: bold;
+ min-height: 8em
+}
+
+#output.error {
+ background: #FF0000;
+ color: #000000;
+ padding: 2em;
+ font-family: monospace;
+ font-weight: bold;
+ min-height: 8em
+}
+
+.dark>.list-group-item {
+ background: #454747
+}
+
+/*# sourceMappingURL=avalon.min.css.map */
\ No newline at end of file
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/css/avalon.min.css.map b/pype/premiere/ppro/css/avalon.min.css.map
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/css/avalon.min.css.map
rename to pype/premiere/ppro/css/avalon.min.css.map
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/css/avalon.scss b/pype/premiere/ppro/css/avalon.scss
similarity index 63%
rename from pype/premiere/extensions/com.pype.avalon/ppro/css/avalon.scss
rename to pype/premiere/ppro/css/avalon.scss
index cf06ece9be..24cee204dd 100644
--- a/pype/premiere/extensions/com.pype.avalon/ppro/css/avalon.scss
+++ b/pype/premiere/ppro/css/avalon.scss
@@ -12,6 +12,15 @@ body {
min-height: 8em;
}
+#output.error {
+ background: #FF0000;
+ color: #000000;
+ padding: 2em;
+ font-family: monospace;
+ font-weight: bold;
+ min-height: 8em
+}
+
.dark > .list-group-item {
background: #454747;
}
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/css/bootstrap.min.css b/pype/premiere/ppro/css/bootstrap.min.css
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/css/bootstrap.min.css
rename to pype/premiere/ppro/css/bootstrap.min.css
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/css/bootstrap.min.css.map b/pype/premiere/ppro/css/bootstrap.min.css.map
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/css/bootstrap.min.css.map
rename to pype/premiere/ppro/css/bootstrap.min.css.map
diff --git a/pype/premiere/ppro/debug.log b/pype/premiere/ppro/debug.log
new file mode 100644
index 0000000000..e952614383
--- /dev/null
+++ b/pype/premiere/ppro/debug.log
@@ -0,0 +1,48 @@
+[0403/172908.369:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.370:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.371:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.371:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/172908.371:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
+[0403/173803.977:ERROR:crash_report_database_win.cc(469)] failed to stat report
diff --git a/pype/premiere/ppro/img/blender.png b/pype/premiere/ppro/img/blender.png
new file mode 100644
index 0000000000..6070a51fae
Binary files /dev/null and b/pype/premiere/ppro/img/blender.png differ
diff --git a/pype/premiere/ppro/index.html b/pype/premiere/ppro/index.html
new file mode 100644
index 0000000000..ba2e2cdad7
--- /dev/null
+++ b/pype/premiere/ppro/index.html
@@ -0,0 +1,156 @@
+<!-- index.html content is not recoverable from this diff; the surviving text shows a
+     panel titled "Pype extention" with "Refresh panel" and "Publish" buttons and an
+     "Output" section. -->
\ No newline at end of file
diff --git a/pype/premiere/ppro/js/.eslintrc.json b/pype/premiere/ppro/js/.eslintrc.json
new file mode 100644
index 0000000000..4e1b23c2e4
--- /dev/null
+++ b/pype/premiere/ppro/js/.eslintrc.json
@@ -0,0 +1,69 @@
+{
+ "parserOptions": {
+ "ecmaVersion": 9,
+ "sourceType": "script",
+ "ecmaFeatures": {
+ "jsx": true
+ }
+ },
+ "rules": {
+ "constructor-super": 2,
+ "for-direction": 2,
+ "getter-return": 2,
+ "no-async-promise-executor": 2,
+ "no-case-declarations": 2,
+ "no-class-assign": 2,
+ "no-compare-neg-zero": 2,
+ "no-cond-assign": 2,
+ "no-const-assign": 2,
+ "no-constant-condition": 2,
+ "no-control-regex": 2,
+ "no-debugger": 2,
+ "no-delete-var": 2,
+ "no-dupe-args": 2,
+ "no-dupe-class-members": 2,
+ "no-dupe-keys": 2,
+ "no-duplicate-case": 2,
+ "no-empty": 2,
+ "no-empty-character-class": 2,
+ "no-empty-pattern": 2,
+ "no-ex-assign": 2,
+ "no-extra-boolean-cast": 2,
+ "no-extra-semi": 2,
+ "no-fallthrough": 2,
+ "no-func-assign": 2,
+ "no-global-assign": 2,
+ "no-inner-declarations": 2,
+ "no-invalid-regexp": 2,
+ "no-irregular-whitespace": 2,
+ "no-misleading-character-class": 2,
+ "no-mixed-spaces-and-tabs": 2,
+ "no-new-symbol": 2,
+ "no-obj-calls": 2,
+ "no-octal": 2,
+ "no-prototype-builtins": 2,
+ "no-redeclare": 2,
+ "no-regex-spaces": 2,
+ "no-self-assign": 2,
+ "no-shadow-restricted-names": 2,
+ "no-sparse-arrays": 2,
+ "no-this-before-super": 2,
+ "no-undef": 2,
+ "no-unexpected-multiline": 2,
+ "no-unreachable": 2,
+ "no-unsafe-finally": 2,
+ "no-unsafe-negation": 2,
+ "no-unused-labels": 2,
+ "no-unused-vars": 2,
+ "no-useless-catch": 2,
+ "no-useless-escape": 2,
+ "no-with": 2,
+ "require-yield": 2,
+ "use-isnan": 2,
+ "valid-typeof": 2
+ },
+ "env": {
+ "es2017": false,
+ "node": true
+ }
+}
\ No newline at end of file
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/build.js b/pype/premiere/ppro/js/build.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/build.js
rename to pype/premiere/ppro/js/build.js
diff --git a/pype/premiere/ppro/js/pype.js b/pype/premiere/ppro/js/pype.js
new file mode 100644
index 0000000000..db1cadeb3b
--- /dev/null
+++ b/pype/premiere/ppro/js/pype.js
@@ -0,0 +1,344 @@
+/* global CSInterface, $, PypeRestApiClient, SystemPath */
+/* eslint-env node, es2017, esversion:6 */
+
+class Pype {
+
+ /**
+ * Initialize important properties and load necessary JSX files.
+ */
+ constructor() {
+ this.csi = new CSInterface();
+ this.outputId = $("#output");
+
+ this.rootFolderPath = this.csi.getSystemPath(SystemPath.EXTENSION);
+ var extensionRoot = this.rootFolderPath + "/jsx/";
+ this.progress("Loading premiere.jsx", true);
+ this.csi.evalScript('$.evalFile("' + extensionRoot + '/PPRO/Premiere.jsx");', () => {
+ this.progress("Loading pype.jsx", true);
+ this.csi.evalScript('$.evalFile("' + extensionRoot + 'pype.jsx");', () => {
+ this.progress("Loading batchRenamer.jsx", true);
+ this.csi.evalScript('$.evalFile("' + extensionRoot + 'batchRenamer.jsx");', () => {
+ this._initialize();
+ });
+ });
+ });
+ }
+
+ _initialize() {
+ var self = this;
+ // get environment
+ this.csi.evalScript('$.pype.getProjectFileData();', (result) => {
+ if (result == "EvalScript error.") {
+ this.error("Cannot get project data.");
+ throw "Cannot get project data";
+ }
+ process.env.EXTENSION_PATH = this.rootFolderPath;
+ this.env = process.env;
+ var resultData = JSON.parse(result);
+ for (var key in resultData) {
+ this.env[key] = resultData[key];
+ }
+ this.csi.evalScript('$.pype.setEnvs(' + JSON.stringify(self.env) + ')');
+ this.pras = new PypeRestApiClient(this.env);
+ this.progress(`Getting presets for ${this.env.AVALON_PROJECT}`, true);
+ this.presets = this.pras.get_presets(this.env.AVALON_PROJECT)
+ .then((presets) => {
+ this.progress("transferring presets to jsx")
+ this.presets = presets;
+ this.csi.evalScript('$.pype.setProjectPreset(' + JSON.stringify(presets) + ');', () => {
+ this.progress("Panel's backend loaded...", true);
+ // bind encoding jobs event listener
+ this.csi.addEventListener("pype.EncoderJobsComplete", this._encodingDone);
+
+ // Bind Interface buttons
+ this._bindButtons();
+ });
+ });
+ });
+ }
+
+ /**
+ * Wrapper function over clip renamer
+ */
+ rename () {
+ let $renameId = $('#rename');
+ let data = {};
+ data.ep = $('input[name=episode]', $renameId).val();
+ data.epSuffix = $('input[name=ep_suffix]', $renameId).val();
+
+ if (!data.ep) {
+ this.csi.evalScript('$.pype.alert_message("' + 'Need to fill episode code' + '")');
+ return;
+ }
+
+ if (!data.epSuffix) {
+ this.csi.evalScript('$.pype.alert_message("' + 'Need to fill episode longer suffix' + '")');
+ return;
+ }
+
+ this.progress(`Doing rename [ ${data.ep} ] | [ ${data.epSuffix} ]`);
+ this.csi.evalScript(
+ 'BatchRenamer.renameTargetedTextLayer(' + JSON.stringify(data) + ' );', (result) => {
+ this.progress(`Renaming result: ${result}`, true);
+ });
+ }
+
+ _bindButtons() {
+ var self = this;
+ $('#btn-publish').click(function () {
+ self.publish();
+ });
+
+ $('#btn-rename').click(function () {
+ self.rename();
+ });
+
+ $('#btn-send-reset').click(function () {
+ $('#publish input[name=send-path]').val("");
+ });
+
+ $('#btn-get-reset').click(function () {
+ $('#publish input[name=get-path]').val("");
+ });
+
+ $('#btn-newWorkfileVersion').click(function () {
+ self.csi.evalScript('$.pype.versionUpWorkFile();');
+ self.progress('New version of the project file saved...', true);
+ });
+
+ $('#btn-get-frame').click(function () {
+ self.csi.evalScript('$._PPP_.exportCurrentFrameAsPNG();', (result) => {
+ self.progress(`Screen grab image path: [${result}]`, true);
+ });
+});
+ }
+
+ /**
+ * Normalize slashes in path string
+ * @param {String} path
+ */
+ static convertPathString (path) {
+ return path.replace(
+ new RegExp('\\\\', 'g'), '/').replace(new RegExp('//\\?/', 'g'), '');
+ }
+ /**
+ * Gather all user UI options for publishing
+ */
+ _gatherPublishUI() {
+ let publishId = $('#publish');
+ let uiVersionUp = $('input[name=version-up]', publishId);
+ let uiAudioOnly = $('input[name=audio-only]', publishId);
+ let uiJsonSendPath = $('input[name=send-path]', publishId);
+ let uiJsonGetPath = $('input[name=get-path]', publishId);
+ this.publishUI = {
+ "versionUp": uiVersionUp.prop('checked'),
+ "audioOnly": uiAudioOnly.prop('checked'),
+ "jsonSendPath": uiJsonSendPath.val(),
+ "jsonGetPath": uiJsonGetPath.val()
+ }
+ }
+
+ _getStagingDir() {
+ const path = require('path');
+ const UUID = require('pure-uuid');
+ const os = require('os');
+
+ const id = new UUID(4).format();
+ return path.join(os.tmpdir(), id);
+ }
+
+ /**
+ * Create staging directories and copy project files
+ * @param {object} projectData Project JSON data
+ */
+ _copyProjectFiles(projectData) {
+ const path = require('path');
+ const fs = require('fs');
+ const mkdirp = require('mkdirp');
+
+ this.stagingDir = this._getStagingDir();
+
+ this.progress(`Creating directory [ ${this.stagingDir} ]`, true);
+
+ mkdirp.sync(this.stagingDir);
+
+ let stagingDir = Pype.convertPathString(this.stagingDir);
+ const destination = Pype.convertPathString(
+ path.join(stagingDir, projectData.projectfile));
+
+ this.progress(`Copying files from [ ${projectData.projectpath} ] -> [ ${destination} ]`);
+ fs.copyFileSync(projectData.projectpath, destination);
+
+ this.progress("Project files copied.", true);
+ }
+
+ _encodeRepresentation(repre) {
+ var self = this;
+ return new Promise(function(resolve, reject) {
+ self.csi.evalScript('$.pype.encodeRepresentation(' + JSON.stringify(repre) + ');', (result) => {
+ if (result == "EvalScript error.") {
+ reject(result);
+ }
+ self.progress("Encoding files to Encoder queue submitted ...", true);
+ const jsonfile = require('jsonfile');
+ let jsonContent = JSON.parse(result);
+ if (self.publishUI.jsonSendPath == "") {
+ self.publishUI.jsonSendPath = self.stagingDir + "\\publishSend.json";
+ $('#publish input[name=send-path]').val(self.publishUI.jsonSendPath);
+ }
+ if (self.publishUI.jsonGetPath == "") {
+ self.publishUI.jsonGetPath = self.stagingDir + "_publishGet.json";
+ $('#publish input[name=get-path]').val(self.publishUI.jsonGetPath);
+ }
+ jsonfile.writeFile(self.publishUI.jsonSendPath, jsonContent);
+ resolve(result);
+ });
+ });
+ }
+
+ _getPyblishRequest(stagingDir) {
+ var self = this;
+ return new Promise(function(resolve, reject) {
+ self.csi.evalScript("$.pype.getPyblishRequest('" + stagingDir + "', '" + self.publishUI.audioOnly + "');", (result) => {
+ if (result === "null" || result === "EvalScript error.") {
+ self.error(`cannot create publish request data ${result}`);
+ reject("cannot create publish request data");
+ } else {
+ console.log(`Request generated: ${result}`);
+ resolve(result);
+ }
+ });
+ });
+ }
+
+ publish() {
+ this._gatherPublishUI();
+ if (this.publishUI.jsonSendPath === "") {
+ // path is empty, so we first prepare data for publishing
+ // and create json
+
+ this.progress("Gathering project data ...", true);
+ this.csi.evalScript('$.pype.getProjectFileData();', (result) => {
+ this._copyProjectFiles(JSON.parse(result))
+ // create request and start encoding
+ // after that is done, we should receive the event and continue in
+ // _encodingDone()
+ this.progress("Creating publishing request ...", true)
+ this._getPyblishRequest(Pype.convertPathString(this.stagingDir))
+ .then(result => {
+ this.progress('Encoding ...');
+ this._encodeRepresentation(JSON.parse(result))
+ .then(result => {
+ console.log('printing result from encoding: ' + result);
+ })
+ .catch(error => {
+ this.error(`failed to encode: ${error}`);
+ });
+ }, error => {
+ this.error(`failed to publish: ${error}`);
+ });
+ this.progress("Waiting for result ...");
+ });
+ } else {
+ // load request
+ var dataToPublish = {
+ "adobePublishJsonPathSend": this.publishUI.jsonSendPath,
+ "adobePublishJsonPathGet": this.publishUI.jsonGetPath,
+ "project": this.env.AVALON_PROJECT,
+ "asset": this.env.AVALON_ASSET,
+ "task": this.env.AVALON_TASK,
+ "workdir": Pype.convertPathString(this.env.AVALON_WORKDIR),
+ "AVALON_APP": this.env.AVALON_APP,
+ "AVALON_APP_NAME": this.env.AVALON_APP_NAME
+ }
+ this.pras.publish(JSON.stringify(dataToPublish))
+ .then((result) => {
+ const fs = require('fs');
+ if (fs.existsSync(result.return_data_path)) {
+ if (this.publishUI.versionUp) {
+ this.progress('Saving new version of the project file', true);
+ this.csi.evalScript('$.pype.versionUpWorkFile();');
+ }
+ // reset jsonSendPath and jsonGetPath in the gui
+ $('#publish input[name=send-path]').val("");
+ $('#publish input[name=get-path]').val("");
+ this.progress("Publishing done.", true);
+ } else {
+ this.error("Publish has not finished correctly");
+ throw "Publish has not finished correctly";
+ }
+ }, (error) => {
+ this.error("Invalid response from server");
+ console.error(error);
+ });
+ }
+ }
+
+ _encodingDone(event) {
+ // `this` is the global object in this callback, so the panel instance is accessed via this.pype
+ console.debug(event);
+ this.pype.progress("Publishing event after encoding finished recieved ...", true);
+ var dataToPublish = {
+ "adobePublishJsonPathSend": this.pype.publishUI.jsonSendPath,
+ "adobePublishJsonPathGet": this.pype.publishUI.jsonGetPath,
+ "gui": true,
+ // "publishPath": Pype.convertPathString(this.pype.env.PUBLISH_PATH),
+ "project": this.pype.env.AVALON_PROJECT,
+ "asset": this.pype.env.AVALON_ASSET,
+ "task": this.pype.env.AVALON_TASK,
+ "workdir": Pype.convertPathString(this.pype.env.AVALON_WORKDIR),
+ "AVALON_APP": this.pype.env.AVALON_APP,
+ "AVALON_APP_NAME": this.pype.env.AVALON_APP_NAME
+ }
+
+ this.pype.progress("Preparing publish ...", true);
+ console.log(JSON.stringify(dataToPublish));
+ this.pype.pras.publish(JSON.stringify(dataToPublish))
+ .then((result) => {
+ const fs = require('fs');
+ if (fs.existsSync(result.return_data_path)) {
+ if (this.pype.publishUI.versionUp) {
+ this.pype.progress('Saving new version of the project file', true);
+ this.pype.csi.evalScript('$.pype.versionUpWorkFile();');
+ }
+ // reset jsonSendPath and jsonGetPath in the gui
+ $('#publish input[name=send-path]').val("");
+ $('#publish input[name=get-path]').val("");
+ this.pype.progress("Publishing done.", true);
+ } else {
+ this.pype.error("Publish has not finished correctly")
+ throw "Publish has not finished correctly";
+ }
+ }, (error) => {
+ this.pype.error("Invalid response from server");
+ console.error(error);
+ });
+ }
+
+ /**
+ * Display error message in div
+ * @param {String} message
+ */
+ error(message) {
+ this.outputId.html(message);
+ this.outputId.addClass("error");
+ console.error(message);
+ }
+
+ /**
+ * Display message in the output div. If append is set, the new message is prepended to the existing output with a line break; otherwise it replaces the output.
+ * @param {String} message
+ * @param {Boolean} append
+ */
+ progress(message, append=false) {
+ this.outputId.removeClass("error");
+ if (append) {
+ this.outputId.prepend(message + "<br/>");
+ } else {
+ this.outputId.html(message);
+ }
+ console.info(message);
+ }
+}
+$(function() {
+ global.pype = new Pype();
+});
diff --git a/pype/premiere/ppro/js/pype_restapi_client.js b/pype/premiere/ppro/js/pype_restapi_client.js
new file mode 100644
index 0000000000..b9a5ec9425
--- /dev/null
+++ b/pype/premiere/ppro/js/pype_restapi_client.js
@@ -0,0 +1,69 @@
+/* eslint-env node, es2017, esversion:6 */
+
+// connecting pype module pype rest api server (pras)
+
+
+class PypeRestApiClient {
+
+ constructor(env) {
+ this.env = env;
+ }
+
+ /**
+ * Return url for pype rest api server service
+ * @return {url string}
+ */
+ _getApiServerUrl() {
+ var url = this.env.PYPE_REST_API_URL;
+ return url
+ }
+
+ /**
+ * Return JSON from server. This will wait for result.
+ * @todo handle status codes and non-json data
+ * @param {String} url server url
+ * @param {object} options request options
+ */
+ async getResponseFromRestApiServer(url, options = {}) {
+ const fetch = require('node-fetch');
+ let defaults = {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json"
+ }
+ }
+ let settings = {...defaults, ...options}
+ const res = await fetch(url, settings);
+ return await res.json();
+ }
+
+
+ /**
+ * Return presets for project from server
+ * @param {String} projectName
+ */
+ async get_presets(projectName) {
+ let server = this._getApiServerUrl();
+ let url = `${server}/adobe/presets/${projectName}`;
+ console.log("connecting ...");
+ let response = await this.getResponseFromRestApiServer(url)
+ console.log("got presets:");
+ console.log(response.data);
+ return response.data;
+ }
+
+ async publish(data) {
+ let server = this._getApiServerUrl();
+ let url = `${server}/adobe/publish`;
+
+ let headers = {
+ "Content-Type": "application/json"
+ }
+ console.log("connecting ...");
+ let response = await this.getResponseFromRestApiServer(
+ url, {method: 'POST', headers: headers, body: data});
+ console.log("got response:");
+ console.log(response.data);
+ return response.data;
+ }
+}
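
The client above only hits two endpoints: `GET {server}/adobe/presets/{project}` and `POST {server}/adobe/publish`. For reference, a rough Python equivalent using requests, assuming the same `{"data": ...}` response envelope that both the JS client and lib.py rely on; these helpers are illustrative and not part of the codebase.

    import json
    import os
    import requests

    def get_presets(project_name):
        # Same endpoint as PypeRestApiClient.get_presets().
        server = os.environ["PYPE_REST_API_URL"]
        response = requests.get("{}/adobe/presets/{}".format(server, project_name))
        return response.json()["data"]

    def publish(payload):
        # Same endpoint as PypeRestApiClient.publish(); payload is a plain dict.
        server = os.environ["PYPE_REST_API_URL"]
        response = requests.post(
            "{}/adobe/publish".format(server),
            headers={"Content-Type": "application/json"},
            data=json.dumps(payload)
        )
        return response.json()["data"]
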
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/CSInterface-8.js b/pype/premiere/ppro/js/vendor/CSInterface-8.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/CSInterface-8.js
rename to pype/premiere/ppro/js/vendor/CSInterface-8.js
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/bootstrap.min.js b/pype/premiere/ppro/js/vendor/bootstrap.min.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/bootstrap.min.js
rename to pype/premiere/ppro/js/vendor/bootstrap.min.js
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/bootstrap.min.js.map b/pype/premiere/ppro/js/vendor/bootstrap.min.js.map
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/bootstrap.min.js.map
rename to pype/premiere/ppro/js/vendor/bootstrap.min.js.map
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/jquery-3.3.1.min.js b/pype/premiere/ppro/js/vendor/jquery-3.3.1.min.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/jquery-3.3.1.min.js
rename to pype/premiere/ppro/js/vendor/jquery-3.3.1.min.js
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/json2.js b/pype/premiere/ppro/js/vendor/json2.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/json2.js
rename to pype/premiere/ppro/js/vendor/json2.js
diff --git a/pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/popper.min.js b/pype/premiere/ppro/js/vendor/popper.min.js
similarity index 100%
rename from pype/premiere/extensions/com.pype.avalon/ppro/js/vendor/popper.min.js
rename to pype/premiere/ppro/js/vendor/popper.min.js
diff --git a/pype/premiere/templates.py b/pype/premiere/templates.py
deleted file mode 100644
index 33a7a6ff61..0000000000
--- a/pype/premiere/templates.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from pype import api as pype
-
-log = pype.Logger.getLogger(__name__, "premiere")
-
-
-def get_anatomy(**kwarg):
- return pype.Anatomy
-
-
-def get_dataflow(**kwarg):
- log.info(kwarg)
- host = kwarg.get("host", "premiere")
- cls = kwarg.get("class", None)
- preset = kwarg.get("preset", None)
- assert any([host, cls]), log.error("premiera.templates.get_dataflow():"
- "Missing mandatory kwargs `host`, `cls`")
-
- pr_dataflow = getattr(pype.Dataflow, str(host), None)
- pr_dataflow_node = getattr(pr_dataflow.nodes, str(cls), None)
- if preset:
- pr_dataflow_node = getattr(pr_dataflow_node, str(preset), None)
-
- log.info("Dataflow: {}".format(pr_dataflow_node))
- return pr_dataflow_node
-
-
-def get_colorspace(**kwarg):
- log.info(kwarg)
- host = kwarg.get("host", "premiere")
- cls = kwarg.get("class", None)
- preset = kwarg.get("preset", None)
- assert any([host, cls]), log.error("premiera.templates.get_colorspace():"
- "Missing mandatory kwargs `host`, `cls`")
-
- pr_colorspace = getattr(pype.Colorspace, str(host), None)
- pr_colorspace_node = getattr(pr_colorspace, str(cls), None)
- if preset:
- pr_colorspace_node = getattr(pr_colorspace_node, str(preset), None)
-
- log.info("Colorspace: {}".format(pr_colorspace_node))
- return pr_colorspace_node
diff --git a/pype/scripts/otio_burnin.py b/pype/scripts/otio_burnin.py
index 8d0b925089..d3d7f4c457 100644
--- a/pype/scripts/otio_burnin.py
+++ b/pype/scripts/otio_burnin.py
@@ -36,7 +36,8 @@ TIMECODE = (
MISSING_KEY_VALUE = "N/A"
CURRENT_FRAME_KEY = "{current_frame}"
CURRENT_FRAME_SPLITTER = "_-_CURRENT_FRAME_-_"
-TIME_CODE_KEY = "{timecode}"
+TIMECODE_KEY = "{timecode}"
+SOURCE_TIMECODE_KEY = "{source_timecode}"
def _streams(source):
@@ -146,11 +147,11 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
options = ffmpeg_burnins.TextOptions(**self.options_init)
options = options.copy()
- if frame_start:
+ if frame_start is not None:
options["frame_offset"] = frame_start
# `frame_end` is only for meassurements of text position
- if frame_end:
+ if frame_end is not None:
options["frame_end"] = frame_end
self._add_burnin(text, align, options, DRAWTEXT)
@@ -172,11 +173,11 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
options = ffmpeg_burnins.TimeCodeOptions(**self.options_init)
options = options.copy()
- if frame_start:
+ if frame_start is not None:
options["frame_offset"] = frame_start
# `frame_end` is only for meassurements of text position
- if frame_end:
+ if frame_end is not None:
options["frame_end"] = frame_end
if not frame_start_tc:
@@ -188,10 +189,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
if not options.get("fps"):
options["fps"] = self.frame_rate
- options["timecode"] = ffmpeg_burnins._frames_to_timecode(
- frame_start_tc,
- self.frame_rate
- )
+ if isinstance(frame_start_tc, str):
+ options["timecode"] = frame_start_tc
+ else:
+ options["timecode"] = ffmpeg_burnins._frames_to_timecode(
+ frame_start_tc,
+ self.frame_rate
+ )
self._add_burnin(text, align, options, TIMECODE)
@@ -208,7 +212,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
if CURRENT_FRAME_SPLITTER in text:
frame_start = options["frame_offset"]
frame_end = options.get("frame_end", frame_start)
- if not frame_start:
+ if frame_start is None:
replacement_final = replacement_size = str(MISSING_KEY_VALUE)
else:
replacement_final = "\\'{}\\'".format(
@@ -296,7 +300,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
args=args,
overwrite=overwrite
)
- print(command)
+ # print(command)
proc = subprocess.Popen(command, shell=True)
proc.communicate()
@@ -412,7 +416,14 @@ def burnins_from_data(
data[CURRENT_FRAME_KEY[1:-1]] = CURRENT_FRAME_SPLITTER
if frame_start_tc is not None:
- data[TIME_CODE_KEY[1:-1]] = TIME_CODE_KEY
+ data[TIMECODE_KEY[1:-1]] = TIMECODE_KEY
+
+ source_timecode = stream.get("timecode")
+ if source_timecode is None:
+ source_timecode = stream.get("tags", {}).get("timecode")
+
+ if source_timecode is not None:
+ data[SOURCE_TIMECODE_KEY[1:-1]] = SOURCE_TIMECODE_KEY
for align_text, value in presets.get('burnins', {}).items():
if not value:
@@ -425,8 +436,6 @@ def burnins_from_data(
" (Make sure you have new burnin presets)."
).format(str(type(value)), str(value)))
- has_timecode = TIME_CODE_KEY in value
-
align = None
align_text = align_text.strip().lower()
if align_text == "top_left":
@@ -442,6 +451,7 @@ def burnins_from_data(
elif align_text == "bottom_right":
align = ModifiedBurnins.BOTTOM_RIGHT
+ has_timecode = TIMECODE_KEY in value
# Replace with missing key value if frame_start_tc is not set
if frame_start_tc is None and has_timecode:
has_timecode = False
@@ -449,7 +459,13 @@ def burnins_from_data(
"`frame_start` and `frame_start_tc`"
" are not set in entered data."
)
- value = value.replace(TIME_CODE_KEY, MISSING_KEY_VALUE)
+ value = value.replace(TIMECODE_KEY, MISSING_KEY_VALUE)
+
+ has_source_timecode = SOURCE_TIMECODE_KEY in value
+ if source_timecode is None and has_source_timecode:
+ has_source_timecode = False
+ log.warning("Source does not have set timecode value.")
+ value = value.replace(SOURCE_TIMECODE_KEY, MISSING_KEY_VALUE)
key_pattern = re.compile(r"(\{.*?[^{0]*\})")
@@ -465,10 +481,20 @@ def burnins_from_data(
value = value.replace(key, MISSING_KEY_VALUE)
# Handle timecode differently
+ if has_source_timecode:
+ args = [align, frame_start, frame_end, source_timecode]
+ if not value.startswith(SOURCE_TIMECODE_KEY):
+ value_items = value.split(SOURCE_TIMECODE_KEY)
+ text = value_items[0].format(**data)
+ args.append(text)
+
+ burnin.add_timecode(*args)
+ continue
+
if has_timecode:
args = [align, frame_start, frame_end, frame_start_tc]
- if not value.startswith(TIME_CODE_KEY):
- value_items = value.split(TIME_CODE_KEY)
+ if not value.startswith(TIMECODE_KEY):
+ value_items = value.split(TIMECODE_KEY)
text = value_items[0].format(**data)
args.append(text)
diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py
index fe795564a5..a41d97668e 100644
--- a/pype/scripts/publish_filesequence.py
+++ b/pype/scripts/publish_filesequence.py
@@ -25,18 +25,6 @@ log.setLevel(logging.DEBUG)
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
-def _load_json(path):
- assert os.path.isfile(path), ("path to json file doesn't exist")
- data = None
- with open(path, "r") as json_file:
- try:
- data = json.load(json_file)
- except Exception as exc:
- log.error(
- "Error loading json: "
- "{} - Exception: {}".format(path, exc)
- )
- return data
def __main__():
parser = argparse.ArgumentParser()
@@ -90,12 +78,6 @@ def __main__():
paths = kwargs.paths or [os.environ.get("PYPE_METADATA_FILE")] or [os.getcwd()] # noqa
- for path in paths:
- data = _load_json(path)
- log.info("Setting session using data from file")
- os.environ["AVALON_PROJECT"] = data["session"]["AVALON_PROJECT"]
- break
-
args = [
os.path.join(pype_root, pype_command),
"publish",
diff --git a/pype/scripts/slates/__init__.py b/pype/scripts/slates/__init__.py
new file mode 100644
index 0000000000..52937708ea
--- /dev/null
+++ b/pype/scripts/slates/__init__.py
@@ -0,0 +1,2 @@
+from . import slate_base
+from .slate_base import api
diff --git a/pype/scripts/slates/__main__.py b/pype/scripts/slates/__main__.py
new file mode 100644
index 0000000000..bd49389d84
--- /dev/null
+++ b/pype/scripts/slates/__main__.py
@@ -0,0 +1,18 @@
+import sys
+import json
+from slate_base import api
+
+
+def main(in_args=None):
+ data_arg = in_args[-1]
+ in_data = json.loads(data_arg)
+ api.create_slates(
+ in_data["fill_data"],
+ in_data.get("slate_name"),
+ in_data.get("slate_data"),
+ in_data.get("data_output_json")
+ )
+
+
+if __name__ == "__main__":
+ main(sys.argv)
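
main() above expects the whole payload as a single JSON argument, with the keys it reads from in_data. A hypothetical invocation, running the slates directory as a script so that the absolute `from slate_base import api` import resolves; paths and values are placeholders.

    import json
    import subprocess

    payload = {
        "fill_data": {"project": {"name": "Testing project"}, "version_name": "sh0100_v01"},
        "slate_name": "example_HD",
        "slate_data": {},              # slate/preset definition, see slate_base/example.py
        "data_output_json": "/tmp/slate_output.json"
    }
    # Running the directory puts it on sys.path and executes its __main__.py.
    subprocess.check_call(["python", "pype/scripts/slates", json.dumps(payload)])
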
diff --git a/pype/scripts/slates/slate_base/__init__.py b/pype/scripts/slates/slate_base/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/pype/scripts/slates/slate_base/api.py b/pype/scripts/slates/slate_base/api.py
new file mode 100644
index 0000000000..cd64c68134
--- /dev/null
+++ b/pype/scripts/slates/slate_base/api.py
@@ -0,0 +1,15 @@
+from .font_factory import FontFactory
+from .base import BaseObj, load_default_style
+from .main_frame import MainFrame
+from .layer import Layer
+from .items import (
+ BaseItem,
+ ItemImage,
+ ItemRectangle,
+ ItemPlaceHolder,
+ ItemText,
+ ItemTable,
+ TableField
+)
+from .lib import create_slates
+from .example import example
diff --git a/pype/scripts/slates/slate_base/base.py b/pype/scripts/slates/slate_base/base.py
new file mode 100644
index 0000000000..35ef46769c
--- /dev/null
+++ b/pype/scripts/slates/slate_base/base.py
@@ -0,0 +1,373 @@
+import os
+import re
+import logging
+import copy
+import json
+from uuid import uuid4
+
+
+def load_default_style():
+ cur_folder = os.path.dirname(os.path.abspath(__file__))
+ default_json_path = os.path.join(cur_folder, "default_style.json")
+ with open(default_json_path, "r") as _file:
+ data = _file.read()
+ return json.loads(data)
+
+
+class BaseObj:
+ """Base Object for slates."""
+
+ obj_type = None
+ available_parents = []
+ all_style_keys = [
+ "font-family", "font-size", "font-color", "font-bold", "font-italic",
+ "bg-color", "bg-alter-color",
+ "alignment-horizontal", "alignment-vertical",
+ "padding", "padding-left", "padding-right",
+ "padding-top", "padding-bottom",
+ "margin", "margin-left", "margin-right",
+ "margin-top", "margin-bottom", "width", "height",
+ "fill", "word-wrap", "ellide", "max-lines"
+ ]
+ fill_data_regex = r"{[^}]+}"
+
+ def __init__(self, parent, style={}, name=None, pos_x=None, pos_y=None):
+ if not self.obj_type:
+ raise NotImplementedError(
+ "Class don't have set object type <{}>".format(
+ self.__class__.__name__
+ )
+ )
+
+ parent_obj_type = None
+ if parent:
+ parent_obj_type = parent.obj_type
+
+ if parent_obj_type not in self.available_parents:
+ expected_parents = ", ".join(self.available_parents)
+ raise Exception((
+ "Invalid parent <{}> for <{}>. Expected <{}>"
+ ).format(
+ parent.__class__.__name__, self.obj_type, expected_parents
+ ))
+
+ self.parent = parent
+ self._style = style
+
+ self.id = uuid4()
+ self.name = name
+ self.items = {}
+
+ self._pos_x = pos_x or 0
+ self._pos_y = pos_y or 0
+
+ log_parts = []
+ module = self.__class__.__module__
+ if module and module != "__main__":
+ log_parts.append(module)
+ log_parts.append(self.__class__.__name__)
+ self.log = logging.getLogger(".".join(log_parts))
+
+ if parent:
+ parent.add_item(self)
+
+ def fill_data_format(self):
+ return
+
+ @property
+ def fill_data(self):
+ return self.parent.fill_data
+
+ @property
+ def main_style(self):
+ return load_default_style()
+
+ def height(self):
+ raise NotImplementedError(
+ "Attribute `height` is not implemented for <{}>".format(
+ self.__class__.__name__
+ )
+ )
+
+ def width(self):
+ raise NotImplementedError(
+ "Attribute `width` is not implemented for <{}>".format(
+ self.__class__.__name__
+ )
+ )
+
+ def collect_data(self):
+ return None
+
+ def find_item(self, obj_type=None, name=None):
+ obj_type_fits = False
+ name_fits = False
+ if obj_type is None or self.obj_type == obj_type:
+ obj_type_fits = True
+
+ if name is None or self.name == name:
+ name_fits = True
+
+ output = []
+ if obj_type_fits and name_fits:
+ output.append(self)
+
+ if not self.items:
+ return output
+
+ for item in self.items.values():
+ output.extend(
+ item.find_item(obj_type=obj_type, name=name)
+ )
+ return output
+
+ @property
+ def full_style(self):
+ if self.parent is not None:
+ style = dict(self.parent.full_style)
+ else:
+ style = self.main_style
+
+ for key, value in self._style.items():
+ if key in self.all_style_keys:
+ # TODO which variant is right?
+ style[self.obj_type][key] = value
+ # style["*"][key] = value
+ else:
+ if key not in style:
+ style[key] = {}
+
+ if isinstance(style[key], dict):
+ style[key].update(value)
+ else:
+ style[key] = value
+
+ return style
+
+ def get_style_for_obj_type(self, obj_type, style=None):
+ if not style:
+ style = copy.deepcopy(self.full_style)
+
+ base = style.get("*") or {}
+ obj_specific = style.get(obj_type) or {}
+ name_specific = {}
+ if self.name:
+ name = str(self.name)
+ if not name.startswith("#"):
+ name = "#" + name
+ name_specific = style.get(name) or {}
+
+ if obj_type == "table-item":
+ col_regex = r"table-item-col\[([\d\-, ]+)*\]"
+ row_regex = r"table-item-row\[([\d\-, ]+)*\]"
+ field_regex = (
+ r"table-item-field\[(([ ]+)?\d+([ ]+)?:([ ]+)?\d+([ ]+)?)*\]"
+ )
+ # STRICT field regex (not allowed spaces)
+ # field_regex = r"table-item-field\[(\d+:\d+)*\]"
+
+ def get_indexes_from_regex_match(result, field=False):
+ group = result.group(1)
+ indexes = []
+ if field:
+ return [
+ int(part.strip()) for part in group.strip().split(":")
+ ]
+
+ parts = group.strip().split(",")
+ for part in parts:
+ part = part.strip()
+ if "-" not in part:
+ indexes.append(int(part))
+ continue
+
+ sub_parts = [
+ int(sub.strip()) for sub in part.split("-")
+ ]
+ if len(sub_parts) != 2:
+ # TODO logging
+ self.log.warning("Invalid range '{}'".format(part))
+ continue
+
+ for idx in range(sub_parts[0], sub_parts[1]+1):
+ indexes.append(idx)
+ return indexes
+
+ for key, value in style.items():
+ if not key.startswith(obj_type):
+ continue
+
+ result = re.search(col_regex, key)
+ if result:
+ indexes = get_indexes_from_regex_match(result)
+ if self.col_idx in indexes:
+ obj_specific.update(value)
+ continue
+
+ result = re.search(row_regex, key)
+ if result:
+ indexes = get_indexes_from_regex_match(result)
+ if self.row_idx in indexes:
+ obj_specific.update(value)
+ continue
+
+ result = re.search(field_regex, key)
+ if result:
+ row_idx, col_idx = get_indexes_from_regex_match(
+ result, True
+ )
+ if self.col_idx == col_idx and self.row_idx == row_idx:
+ obj_specific.update(value)
+
+ output = {}
+ output.update(base)
+ output.update(obj_specific)
+ output.update(name_specific)
+
+ return output
+
+ @property
+ def style(self):
+ return self.get_style_for_obj_type(self.obj_type)
+
+ @property
+ def item_pos_x(self):
+ if self.parent.obj_type == "main_frame":
+ return int(self._pos_x)
+ return 0
+
+ @property
+ def item_pos_y(self):
+ if self.parent.obj_type == "main_frame":
+ return int(self._pos_y)
+ return 0
+
+ @property
+ def content_pos_x(self):
+ pos_x = self.item_pos_x
+ margin = self.style["margin"]
+ margin_left = self.style.get("margin-left") or margin
+
+ pos_x += margin_left
+
+ return pos_x
+
+ @property
+ def content_pos_y(self):
+ pos_y = self.item_pos_y
+ margin = self.style["margin"]
+ margin_top = self.style.get("margin-top") or margin
+ return pos_y + margin_top
+
+ @property
+ def value_pos_x(self):
+ pos_x = int(self.content_pos_x)
+ padding = self.style["padding"]
+ padding_left = self.style.get("padding-left")
+ if padding_left is None:
+ padding_left = padding
+
+ pos_x += padding_left
+
+ return pos_x
+
+ @property
+ def value_pos_y(self):
+ pos_y = int(self.content_pos_y)
+ padding = self.style["padding"]
+ padding_top = self.style.get("padding-top")
+ if padding_top is None:
+ padding_top = padding
+
+ pos_y += padding_top
+
+ return pos_y
+
+ @property
+ def value_pos_start(self):
+ return (self.value_pos_x, self.value_pos_y)
+
+ @property
+ def value_pos_end(self):
+ pos_x, pos_y = self.value_pos_start
+ pos_x += self.width()
+ pos_y += self.height()
+ return (pos_x, pos_y)
+
+ @property
+ def content_pos_start(self):
+ return (self.content_pos_x, self.content_pos_y)
+
+ @property
+ def content_pos_end(self):
+ pos_x, pos_y = self.content_pos_start
+ pos_x += self.content_width()
+ pos_y += self.content_height()
+ return (pos_x, pos_y)
+
+ def value_width(self):
+ raise NotImplementedError(
+ "Attribute
is not implemented <{}>".format(
+ self.__class__.__name__
+ )
+ )
+
+ def value_height(self):
+ raise NotImplementedError(
+ "Attribute is not implemented for <{}>".format(
+ self.__class__.__name__
+ )
+ )
+
+ def content_width(self):
+ width = self.value_width()
+ padding = self.style["padding"]
+ padding_left = self.style.get("padding-left")
+ if padding_left is None:
+ padding_left = padding
+
+ padding_right = self.style.get("padding-right")
+ if padding_right is None:
+ padding_right = padding
+
+ return width + padding_left + padding_right
+
+ def content_height(self):
+ height = self.value_height()
+ padding = self.style["padding"]
+ padding_top = self.style.get("padding-top")
+ if padding_top is None:
+ padding_top = padding
+
+ padding_bottom = self.style.get("padding-bottom")
+ if padding_bottom is None:
+ padding_bottom = padding
+
+ return height + padding_top + padding_bottom
+
+ def width(self):
+ width = self.content_width()
+
+ margin = self.style["margin"]
+ margin_left = self.style.get("margin-left") or margin
+ margin_right = self.style.get("margin-right") or margin
+
+ return width + margin_left + margin_right
+
+ def height(self):
+ height = self.content_height()
+
+ margin = self.style["margin"]
+ margin_top = self.style.get("margin-top") or margin
+ margin_bottom = self.style.get("margin-bottom") or margin
+
+ return height + margin_bottom + margin_top
+
+ def add_item(self, item):
+ self.items[item.id] = item
+ item.fill_data_format()
+
+
+ def reset(self):
+ for item in self.items.values():
+ item.reset()
diff --git a/pype/scripts/slates/slate_base/default_style.json b/pype/scripts/slates/slate_base/default_style.json
new file mode 100644
index 0000000000..d0748846a5
--- /dev/null
+++ b/pype/scripts/slates/slate_base/default_style.json
@@ -0,0 +1,58 @@
+{
+ "*": {
+ "font-family": "arial",
+ "font-size": 26,
+ "font-color": "#ffffff",
+ "font-bold": false,
+ "font-italic": false,
+ "bg-color": "#0077ff",
+ "alignment-horizontal": "left",
+ "alignment-vertical": "top",
+ "word-wrap": true,
+ "ellide": true,
+ "max-lines": null
+ },
+ "layer": {
+ "padding": 0,
+ "margin": 0
+ },
+ "rectangle": {
+ "padding": 0,
+ "margin": 0,
+ "fill": true
+ },
+ "image": {
+ "padding": 0,
+ "margin": 0,
+ "fill": true
+ },
+ "placeholder": {
+ "padding": 0,
+ "margin": 0,
+ "fill": true
+ },
+ "main_frame": {
+ "padding": 0,
+ "margin": 0,
+ "bg-color": "#252525"
+ },
+ "table": {
+ "padding": 0,
+ "margin": 0,
+ "bg-color": "transparent"
+ },
+ "table-item": {
+ "padding": 0,
+ "margin": 0,
+ "bg-color": "#212121",
+ "bg-alter-color": "#272727",
+ "font-color": "#dcdcdc",
+ "font-bold": false,
+ "font-italic": false,
+ "alignment-horizontal": "left",
+ "alignment-vertical": "top",
+ "word-wrap": false,
+ "ellide": true,
+ "max-lines": 1
+ }
+}
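
The selector keys in this file follow the resolution order implemented in BaseObj.get_style_for_obj_type above: the "*" block is applied first, then the object-type block (plus any matching table-item-col/row/field selectors), then a "#name" block. A small illustration of that precedence, using made-up values:

    style = {
        "*": {"font-size": 26, "font-color": "#ffffff"},
        "table-item": {"font-color": "#dcdcdc"},
        "table-item-col[0]": {"font-bold": True},
        "#colorbar": {"bg-color": "#9932CC"},
    }

    # Effective style for a table-item named "colorbar" sitting in column 0:
    effective = {}
    effective.update(style["*"])                      # base
    effective.update({**style["table-item"],          # object type
                      **style["table-item-col[0]"]})  # matching column selector
    effective.update(style["#colorbar"])              # name specific
    # -> {'font-size': 26, 'font-color': '#dcdcdc', 'font-bold': True, 'bg-color': '#9932CC'}
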
diff --git a/pype/scripts/slates/slate_base/example.py b/pype/scripts/slates/slate_base/example.py
new file mode 100644
index 0000000000..560f9ec02d
--- /dev/null
+++ b/pype/scripts/slates/slate_base/example.py
@@ -0,0 +1,254 @@
+# import sys
+# sys.path.append(r"PATH/TO/PILLOW/PACKAGE")
+
+from . import api
+
+
+def example():
+ """Example data to demontrate function.
+
+ It is required to fill "destination_path", "thumbnail_path"
+ and "color_bar_path" in `example_fill_data` to be able to execute.
+ """
+
+ example_fill_data = {
+ "destination_path": "PATH/TO/OUTPUT/FILE",
+ "project": {
+ "name": "Testing project"
+ },
+ "intent": "WIP",
+ "version_name": "seq01_sh0100_compositing_v01",
+ "date": "2019-08-09",
+ "shot_type": "2d comp",
+ "submission_note": (
+ "Lorem ipsum dolor sit amet, consectetuer adipiscing elit."
+ " Aenean commodo ligula eget dolor. Aenean massa."
+ " Cum sociis natoque penatibus et magnis dis parturient montes,"
+ " nascetur ridiculus mus. Donec quam felis, ultricies nec,"
+ " pellentesque eu, pretium quis, sem. Nulla consequat massa quis"
+ " enim. Donec pede justo, fringilla vel,"
+ " aliquet nec, vulputate eget, arcu."
+ ),
+ "thumbnail_path": "PATH/TO/THUMBNAIL/FILE",
+ "color_bar_path": "PATH/TO/COLOR/BAR/FILE",
+ "vendor": "Our Studio",
+ "shot_name": "sh0100",
+ "frame_start": 1001,
+ "frame_end": 1004,
+ "duration": 3
+ }
+
+ example_presets = {"example_HD": {
+ "width": 1920,
+ "height": 1080,
+ "destination_path": "{destination_path}",
+ "style": {
+ "*": {
+ "font-family": "arial",
+ "font-color": "#ffffff",
+ "font-bold": False,
+ "font-italic": False,
+ "bg-color": "#0077ff",
+ "alignment-horizontal": "left",
+ "alignment-vertical": "top"
+ },
+ "layer": {
+ "padding": 0,
+ "margin": 0
+ },
+ "rectangle": {
+ "padding": 0,
+ "margin": 0,
+ "bg-color": "#E9324B",
+ "fill": True
+ },
+ "main_frame": {
+ "padding": 0,
+ "margin": 0,
+ "bg-color": "#252525"
+ },
+ "table": {
+ "padding": 0,
+ "margin": 0,
+ "bg-color": "transparent"
+ },
+ "table-item": {
+ "padding": 5,
+ "padding-bottom": 10,
+ "margin": 0,
+ "bg-color": "#212121",
+ "bg-alter-color": "#272727",
+ "font-color": "#dcdcdc",
+ "font-bold": False,
+ "font-italic": False,
+ "alignment-horizontal": "left",
+ "alignment-vertical": "top",
+ "word-wrap": False,
+ "ellide": True,
+ "max-lines": 1
+ },
+ "table-item-col[0]": {
+ "font-size": 20,
+ "font-color": "#898989",
+ "font-bold": True,
+ "ellide": False,
+ "word-wrap": True,
+ "max-lines": None
+ },
+ "table-item-col[1]": {
+ "font-size": 40,
+ "padding-left": 10
+ },
+ "#colorbar": {
+ "bg-color": "#9932CC"
+ }
+ },
+ "items": [{
+ "type": "layer",
+ "direction": 1,
+ "name": "MainLayer",
+ "style": {
+ "#MainLayer": {
+ "width": 1094,
+ "height": 1000,
+ "margin": 25,
+ "padding": 0
+ },
+ "#LeftSide": {
+ "margin-right": 25
+ }
+ },
+ "items": [{
+ "type": "layer",
+ "name": "LeftSide",
+ "items": [{
+ "type": "layer",
+ "direction": 1,
+ "style": {
+ "table-item": {
+ "bg-color": "transparent",
+ "padding-bottom": 20
+ },
+ "table-item-col[0]": {
+ "font-size": 20,
+ "font-color": "#898989",
+ "alignment-horizontal": "right"
+ },
+ "table-item-col[1]": {
+ "alignment-horizontal": "left",
+ "font-bold": True,
+ "font-size": 40
+ }
+ },
+ "items": [{
+ "type": "table",
+ "values": [
+ ["Show:", "{project[name]}"]
+ ],
+ "style": {
+ "table-item-field[0:0]": {
+ "width": 150
+ },
+ "table-item-field[0:1]": {
+ "width": 580
+ }
+ }
+ }, {
+ "type": "table",
+ "values": [
+ ["Submitting For:", "{intent}"]
+ ],
+ "style": {
+ "table-item-field[0:0]": {
+ "width": 160
+ },
+ "table-item-field[0:1]": {
+ "width": 218,
+ "alignment-horizontal": "right"
+ }
+ }
+ }]
+ }, {
+ "type": "rectangle",
+ "style": {
+ "bg-color": "#bc1015",
+ "width": 1108,
+ "height": 5,
+ "fill": True
+ }
+ }, {
+ "type": "table",
+ "use_alternate_color": True,
+ "values": [
+ ["Version name:", "{version_name}"],
+ ["Date:", "{date}"],
+ ["Shot Types:", "{shot_type}"],
+ ["Submission Note:", "{submission_note}"]
+ ],
+ "style": {
+ "table-item": {
+ "padding-bottom": 20
+ },
+ "table-item-field[0:1]": {
+ "font-bold": True
+ },
+ "table-item-field[3:0]": {
+ "word-wrap": True,
+ "ellide": True,
+ "max-lines": 4
+ },
+ "table-item-col[0]": {
+ "alignment-horizontal": "right",
+ "width": 150
+ },
+ "table-item-col[1]": {
+ "alignment-horizontal": "left",
+ "width": 958
+ }
+ }
+ }]
+ }, {
+ "type": "layer",
+ "name": "RightSide",
+ "items": [{
+ "type": "placeholder",
+ "name": "thumbnail",
+ "path": "{thumbnail_path}",
+ "style": {
+ "width": 730,
+ "height": 412
+ }
+ }, {
+ "type": "placeholder",
+ "name": "colorbar",
+ "path": "{color_bar_path}",
+ "return_data": True,
+ "style": {
+ "width": 730,
+ "height": 55
+ }
+ }, {
+ "type": "table",
+ "use_alternate_color": True,
+ "values": [
+ ["Vendor:", "{vendor}"],
+ ["Shot Name:", "{shot_name}"],
+ ["Frames:", "{frame_start} - {frame_end} ({duration})"]
+ ],
+ "style": {
+ "table-item-col[0]": {
+ "alignment-horizontal": "left",
+ "width": 200
+ },
+ "table-item-col[1]": {
+ "alignment-horizontal": "right",
+ "width": 530,
+ "font-size": 30
+ }
+ }
+ }]
+ }]
+ }]
+ }}
+
+ api.create_slates(example_fill_data, "example_HD", example_presets)
diff --git a/pype/scripts/slates/slate_base/font_factory.py b/pype/scripts/slates/slate_base/font_factory.py
new file mode 100644
index 0000000000..77df9a40a7
--- /dev/null
+++ b/pype/scripts/slates/slate_base/font_factory.py
@@ -0,0 +1,93 @@
+import os
+import sys
+import collections
+
+from PIL import ImageFont
+
+
+class FontFactory:
+ fonts = None
+ default = None
+
+ @classmethod
+ def get_font(cls, family, font_size=None, italic=False, bold=False):
+ if cls.fonts is None:
+ cls.load_fonts()
+
+ styles = []
+ if bold:
+ styles.append("Bold")
+
+ if italic:
+ styles.append("Italic")
+
+ if not styles:
+ styles.append("Regular")
+
+ style = " ".join(styles)
+ family = family.lower()
+ family_styles = cls.fonts.get(family)
+ if not family_styles:
+ return cls.default
+
+ font = family_styles.get(style)
+ if font:
+ if font_size:
+ font = font.font_variant(size=font_size)
+ return font
+
+ # Return first found
+ for font in family_styles.values():
+ if font_size:
+ font = font.font_variant(size=font_size)
+ return font
+
+ return cls.default
+
+ @classmethod
+ def load_fonts(cls):
+
+ cls.default = ImageFont.load_default()
+
+ available_font_ext = [".ttf", ".ttc"]
+ dirs = []
+ if sys.platform == "win32":
+ # check the windows font repository
+ # NOTE: must use uppercase WINDIR, to work around bugs in
+ # 1.5.2's os.environ.get()
+ windir = os.environ.get("WINDIR")
+ if windir:
+ dirs.append(os.path.join(windir, "fonts"))
+
+ elif sys.platform in ("linux", "linux2"):
+ lindirs = os.environ.get("XDG_DATA_DIRS", "")
+ if not lindirs:
+ # According to the freedesktop spec, XDG_DATA_DIRS should
+ # default to /usr/share
+ lindirs = "/usr/share"
+ dirs += [
+ os.path.join(lindir, "fonts") for lindir in lindirs.split(":")
+ ]
+
+ elif sys.platform == "darwin":
+ dirs += [
+ "/Library/Fonts",
+ "/System/Library/Fonts",
+ os.path.expanduser("~/Library/Fonts")
+ ]
+
+ available_fonts = collections.defaultdict(dict)
+ for directory in dirs:
+ for walkroot, walkdir, walkfilenames in os.walk(directory):
+ for walkfilename in walkfilenames:
+ ext = os.path.splitext(walkfilename)[1]
+ if ext.lower() not in available_font_ext:
+ continue
+
+ fontpath = os.path.join(walkroot, walkfilename)
+ font_obj = ImageFont.truetype(fontpath)
+ family = font_obj.font.family.lower()
+ style = font_obj.font.style
+ available_fonts[family][style] = font_obj
+
+ cls.fonts = available_fonts
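
A small, hypothetical usage example for FontFactory; the family name and output path are placeholders, and the import path assumes the file layout introduced in this diff.

    from PIL import Image, ImageDraw
    from pype.scripts.slates.slate_base.font_factory import FontFactory

    # Resolve a font through the system font directories scanned in load_fonts();
    # falls back to PIL's default bitmap font when the family is not found.
    font = FontFactory.get_font("arial", font_size=26, bold=True)

    image = Image.new("RGB", (400, 60), "#252525")
    drawer = ImageDraw.Draw(image)
    drawer.text((10, 10), "Testing project", font=font, fill="#ffffff")
    image.save("font_factory_demo.png")
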
diff --git a/pype/scripts/slates/slate_base/items.py b/pype/scripts/slates/slate_base/items.py
new file mode 100644
index 0000000000..6d19fc6a0c
--- /dev/null
+++ b/pype/scripts/slates/slate_base/items.py
@@ -0,0 +1,667 @@
+import os
+import re
+from PIL import Image
+
+from .base import BaseObj
+from .font_factory import FontFactory
+
+
+class BaseItem(BaseObj):
+ available_parents = ["main_frame", "layer"]
+
+ @property
+ def item_pos_x(self):
+ if self.parent.obj_type == "main_frame":
+ return self._pos_x
+ return self.parent.child_pos_x(self.id)
+
+ @property
+ def item_pos_y(self):
+ if self.parent.obj_type == "main_frame":
+ return self._pos_y
+ return self.parent.child_pos_y(self.id)
+
+ def add_item(self, *args, **kwargs):
+ raise Exception("Can't add item to an item, use layers instead.")
+
+ def draw(self, image, drawer):
+ raise NotImplementedError(
+ "Method `draw` is not implemented for <{}>".format(
+ self.__class__.__name__
+ )
+ )
+
+
+class ItemImage(BaseItem):
+ obj_type = "image"
+
+ def __init__(self, image_path, *args, **kwargs):
+ self.image_path = image_path
+ super(ItemImage, self).__init__(*args, **kwargs)
+
+ def fill_data_format(self):
+ if re.match(self.fill_data_regex, self.image_path):
+ self.image_path = self.image_path.format(**self.fill_data)
+
+ def draw(self, image, drawer):
+ source_image = Image.open(os.path.normpath(self.image_path))
+ paste_image = source_image.resize(
+ (self.value_width(), self.value_height()),
+ Image.ANTIALIAS
+ )
+ image.paste(
+ paste_image,
+ (self.value_pos_x, self.value_pos_y)
+ )
+
+ def value_width(self):
+ return int(self.style["width"])
+
+ def value_height(self):
+ return int(self.style["height"])
+
+
+class ItemRectangle(BaseItem):
+ obj_type = "rectangle"
+
+ def draw(self, image, drawer):
+ bg_color = self.style["bg-color"]
+ fill = self.style.get("fill", False)
+ kwargs = {}
+ if fill:
+ kwargs["fill"] = bg_color
+ else:
+ kwargs["outline"] = bg_color
+
+ start_pos_x = self.value_pos_x
+ start_pos_y = self.value_pos_y
+ end_pos_x = start_pos_x + self.value_width()
+ end_pos_y = start_pos_y + self.value_height()
+ drawer.rectangle(
+ (
+ (start_pos_x, start_pos_y),
+ (end_pos_x, end_pos_y)
+ ),
+ **kwargs
+ )
+
+ def value_width(self):
+ return int(self.style["width"])
+
+ def value_height(self):
+ return int(self.style["height"])
+
+
+class ItemPlaceHolder(BaseItem):
+ obj_type = "placeholder"
+
+ def __init__(self, image_path, *args, **kwargs):
+ self.image_path = image_path
+ super(ItemPlaceHolder, self).__init__(*args, **kwargs)
+
+ def fill_data_format(self):
+ if re.match(self.fill_data_regex, self.image_path):
+ self.image_path = self.image_path.format(**self.fill_data)
+
+ def draw(self, image, drawer):
+ bg_color = self.style["bg-color"]
+
+ kwargs = {}
+        if bg_color and bg_color.lower() != "transparent":
+ kwargs["fill"] = bg_color
+
+ start_pos_x = self.value_pos_x
+ start_pos_y = self.value_pos_y
+ end_pos_x = start_pos_x + self.value_width()
+ end_pos_y = start_pos_y + self.value_height()
+
+ drawer.rectangle(
+ (
+ (start_pos_x, start_pos_y),
+ (end_pos_x, end_pos_y)
+ ),
+ **kwargs
+ )
+
+ def value_width(self):
+ return int(self.style["width"])
+
+ def value_height(self):
+ return int(self.style["height"])
+
+ def collect_data(self):
+ return {
+ "pos_x": self.value_pos_x,
+ "pos_y": self.value_pos_y,
+ "width": self.value_width(),
+ "height": self.value_height(),
+ "path": self.image_path
+ }
+
+
+class ItemText(BaseItem):
+ obj_type = "text"
+
+ def __init__(self, value, *args, **kwargs):
+ self.value = value
+ super(ItemText, self).__init__(*args, **kwargs)
+
+ def draw(self, image, drawer):
+ bg_color = self.style["bg-color"]
+ if bg_color and bg_color.lower() != "transparent":
+ # TODO border outline styles
+ drawer.rectangle(
+ (self.content_pos_start, self.content_pos_end),
+ fill=bg_color,
+ outline=None
+ )
+
+ font_color = self.style["font-color"]
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+ drawer.text(
+ self.value_pos_start,
+ self.value,
+ font=font,
+ fill=font_color
+ )
+
+ def value_width(self):
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+ width = font.getsize(self.value)[0]
+ return int(width)
+
+ def value_height(self):
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+ height = font.getsize(self.value)[1]
+ return int(height)
+
+
+class ItemTable(BaseItem):
+
+ obj_type = "table"
+
+ def __init__(self, values, use_alternate_color=False, *args, **kwargs):
+
+ self.values_by_cords = None
+ self.prepare_values(values)
+
+ super(ItemTable, self).__init__(*args, **kwargs)
+ self.size_values = None
+ self.calculate_sizes()
+
+ self.use_alternate_color = use_alternate_color
+
+ def add_item(self, item):
+ if item.obj_type == "table-item":
+ return
+ super(ItemTable, self).add_item(item)
+
+ def fill_data_format(self):
+ for item in self.values:
+ item.fill_data_format()
+
+ def prepare_values(self, _values):
+ values = []
+ values_by_cords = []
+ row_count = 0
+ col_count = 0
+ for row in _values:
+ row_count += 1
+ if len(row) > col_count:
+ col_count = len(row)
+
+ for row_idx in range(row_count):
+ values_by_cords.append([])
+ for col_idx in range(col_count):
+ values_by_cords[row_idx].append([])
+ if col_idx <= len(_values[row_idx]) - 1:
+ col = _values[row_idx][col_idx]
+ else:
+ col = ""
+
+ col_item = TableField(row_idx, col_idx, col, parent=self)
+ values_by_cords[row_idx][col_idx] = col_item
+ values.append(col_item)
+
+ self.values = values
+ self.values_by_cords = values_by_cords
+
+ def calculate_sizes(self):
+ row_heights = []
+ col_widths = []
+ for row_idx, row in enumerate(self.values_by_cords):
+ row_heights.append(0)
+ for col_idx, col_item in enumerate(row):
+ if len(col_widths) < col_idx + 1:
+ col_widths.append(0)
+
+ _width = col_widths[col_idx]
+ item_width = col_item.width()
+ if _width < item_width:
+ col_widths[col_idx] = item_width
+
+ _height = row_heights[row_idx]
+ item_height = col_item.height()
+ if _height < item_height:
+ row_heights[row_idx] = item_height
+
+ self.size_values = (row_heights, col_widths)
+
+ def draw(self, image, drawer):
+ bg_color = self.style["bg-color"]
+ if bg_color and bg_color.lower() != "transparent":
+ # TODO border outline styles
+ drawer.rectangle(
+ (self.content_pos_start, self.content_pos_end),
+ fill=bg_color,
+ outline=None
+ )
+
+ for value in self.values:
+ value.draw(image, drawer)
+
+ def value_width(self):
+ row_heights, col_widths = self.size_values
+ width = 0
+ for _width in col_widths:
+ width += _width
+
+ if width != 0:
+ width -= 1
+ return width
+
+ def value_height(self):
+ row_heights, col_widths = self.size_values
+ height = 0
+ for _height in row_heights:
+ height += _height
+
+ if height != 0:
+ height -= 1
+ return height
+
+ def content_pos_info_by_cord(self, row_idx, col_idx):
+ row_heights, col_widths = self.size_values
+ pos_x = int(self.value_pos_x)
+ pos_y = int(self.value_pos_y)
+ width = 0
+ height = 0
+ for idx, value in enumerate(col_widths):
+ if col_idx == idx:
+ width = value
+ break
+ pos_x += value
+
+ for idx, value in enumerate(row_heights):
+ if row_idx == idx:
+ height = value
+ break
+ pos_y += value
+
+ return (pos_x, pos_y, width, height)
+
+
+class TableField(BaseItem):
+
+ obj_type = "table-item"
+ available_parents = ["table"]
+ ellide_text = "..."
+
+ def __init__(self, row_idx, col_idx, value, *args, **kwargs):
+ super(TableField, self).__init__(*args, **kwargs)
+ self.row_idx = row_idx
+ self.col_idx = col_idx
+ self.value = value
+
+ def recalculate_by_width(self, value, max_width):
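+        """Fit `value` into `max_width` using the field's text styles.
+
+        Behaviour is driven by the `word-wrap`, `ellide` and `max-lines`
+        style keys: text may be wrapped onto multiple lines, shortened with
+        `ellide_text`, or dropped entirely (empty string) when nothing fits.
+        """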
+ padding = self.style["padding"]
+ padding_left = self.style.get("padding-left")
+ if padding_left is None:
+ padding_left = padding
+
+ padding_right = self.style.get("padding-right")
+ if padding_right is None:
+ padding_right = padding
+
+ max_width -= (padding_left + padding_right)
+
+ if not value:
+ return ""
+
+ word_wrap = self.style.get("word-wrap")
+ ellide = self.style.get("ellide")
+ max_lines = self.style.get("max-lines")
+
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+ val_width = font.getsize(value)[0]
+ if val_width <= max_width:
+ return value
+
+ if not ellide and not word_wrap:
+ # TODO logging
+ self.log.warning((
+                "Can't draw text because it is too long and both"
+                " `word-wrap` and `ellide` are turned off <{}>"
+ ).format(value))
+ return ""
+
+ elif ellide and not word_wrap:
+ max_lines = 1
+
+        words = value.split()
+ words_len = len(words)
+ lines = []
+ last_index = None
+ while True:
+ start_index = 0
+ if last_index is not None:
+ start_index = int(last_index) + 1
+
+ line = ""
+ for idx in range(start_index, words_len):
+ _word = words[idx]
+ connector = " "
+ if line == "":
+ connector = ""
+
+ _line = connector.join([line, _word])
+ _line_width = font.getsize(_line)[0]
+ if _line_width > max_width:
+ break
+ line = _line
+ last_index = idx
+
+ if line:
+ lines.append(line)
+
+ if last_index == words_len - 1:
+ break
+
+ elif last_index is None:
+ add_message = ""
+ if ellide:
+ add_message = " String was shortened to `{}`."
+ line = ""
+                    for char_idx, char in enumerate(words[idx]):
+                        _line = line + char + self.ellide_text
+                        _line_width = font.getsize(_line)[0]
+                        if _line_width > max_width:
+                            if char_idx == 0:
+                                line = _line
+                            break
+                        line = line + char
+
+ lines.append(line)
+ # TODO logging
+ self.log.warning((
+ "Font size is too big.{} <{}>"
+                    ).format(add_message.format(line), value))
+ break
+
+ output = ""
+ if not lines:
+ return output
+
+ over_max_lines = (max_lines and len(lines) > max_lines)
+ if not over_max_lines:
+ return "\n".join([line for line in lines])
+
+ lines = [lines[idx] for idx in range(max_lines)]
+ if not ellide:
+ return "\n".join(lines)
+
+ last_line = lines[-1]
+ last_line_width = font.getsize(last_line + self.ellide_text)[0]
+ if last_line_width <= max_width:
+ lines[-1] += self.ellide_text
+ return "\n".join([line for line in lines])
+
+ last_line_words = last_line.split()
+ if len(last_line_words) == 1:
+ if max_lines > 1:
+ # TODO try previous line?
+ lines[-1] = self.ellide_text
+ return "\n".join([line for line in lines])
+
+ line = ""
+ for idx, word in enumerate(last_line_words):
+ _line = line + word + self.ellide_text
+ _line_width = font.getsize(_line)[0]
+ if _line_width > max_width:
+ if idx == 0:
+ line = _line
+ break
+ line = _line
+ lines[-1] = line
+
+ return "\n".join([line for line in lines])
+
+ line = ""
+ for idx, _word in enumerate(last_line_words):
+ connector = " "
+ if line == "":
+ connector = ""
+
+ _line = connector.join([line, _word + self.ellide_text])
+ _line_width = font.getsize(_line)[0]
+
+ if _line_width <= max_width:
+ line = connector.join([line, _word])
+ continue
+
+ if idx != 0:
+ line += self.ellide_text
+ break
+
+ if max_lines != 1:
+ # TODO try previous line?
+ line = self.ellide_text
+ break
+
+ for idx, char in enumerate(_word):
+ _line = line + char + self.ellide_text
+ _line_width = font.getsize(_line)[0]
+ if _line_width > max_width:
+ if idx == 0:
+ line = _line
+ break
+ line = line + char
+ break
+
+ lines[-1] = line
+
+ return "\n".join([line for line in lines])
+
+ def fill_data_format(self):
+ value = self.value
+ if re.match(self.fill_data_regex, value):
+ value = value.format(**self.fill_data)
+
+ self.orig_value = value
+
+ max_width = self.style.get("max-width")
+ max_width = self.style.get("width") or max_width
+ if max_width:
+ value = self.recalculate_by_width(value, max_width)
+
+ self.value = value
+
+ def content_width(self):
+ width = self.style.get("width")
+ if width:
+ return int(width)
+ return super(TableField, self).content_width()
+
+ def content_height(self):
+ return super(TableField, self).content_height()
+
+ def value_width(self):
+ if not self.value:
+ return 0
+
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+ width = font.getsize_multiline(self.value)[0] + 1
+
+        min_width = self.style.get("min-width")
+ if min_width and min_width > width:
+ width = min_width
+
+ return int(width)
+
+ def value_height(self):
+ if not self.value:
+ return 0
+
+ height = self.style.get("height")
+ if height:
+ return int(height)
+
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+ height = font.getsize_multiline(self.value)[1] + 1
+
+ min_height = self.style.get("min-height")
+ if min_height and min_height > height:
+ height = min_height
+
+ return int(height)
+
+ @property
+ def item_pos_x(self):
+ pos_x, pos_y, width, height = (
+ self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
+ )
+ return pos_x
+
+ @property
+ def item_pos_y(self):
+ pos_x, pos_y, width, height = (
+ self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
+ )
+ return pos_y
+
+ @property
+ def value_pos_x(self):
+ pos_x, pos_y, width, height = (
+ self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
+ )
+ alignment_hor = self.style["alignment-horizontal"].lower()
+ if alignment_hor in ["center", "centre"]:
+ pos_x += (width - self.value_width()) / 2
+
+ elif alignment_hor == "right":
+ pos_x += width - self.value_width()
+
+ else:
+ padding = self.style["padding"]
+ padding_left = self.style.get("padding-left")
+ if padding_left is None:
+ padding_left = padding
+
+ pos_x += padding_left
+
+ return int(pos_x)
+
+ @property
+ def value_pos_y(self):
+ pos_x, pos_y, width, height = (
+ self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
+ )
+
+ alignment_ver = self.style["alignment-vertical"].lower()
+ if alignment_ver in ["center", "centre"]:
+ pos_y += (height - self.value_height()) / 2
+
+ elif alignment_ver == "bottom":
+ pos_y += height - self.value_height()
+
+ else:
+ padding = self.style["padding"]
+ padding_top = self.style.get("padding-top")
+ if padding_top is None:
+ padding_top = padding
+
+ pos_y += padding_top
+
+ return int(pos_y)
+
+ def draw(self, image, drawer):
+ pos_x, pos_y, width, height = (
+ self.parent.content_pos_info_by_cord(self.row_idx, self.col_idx)
+ )
+ pos_start = (pos_x, pos_y)
+ pos_end = (pos_x + width, pos_y + height)
+ bg_color = self.style["bg-color"]
+ if self.parent.use_alternate_color and (self.row_idx % 2) == 1:
+ bg_color = self.style["bg-alter-color"]
+
+ if bg_color and bg_color.lower() != "transparent":
+ # TODO border outline styles
+ drawer.rectangle(
+ (pos_start, pos_end),
+ fill=bg_color,
+ outline=None
+ )
+
+ font_color = self.style["font-color"]
+ font_family = self.style["font-family"]
+ font_size = self.style["font-size"]
+ font_bold = self.style.get("font-bold", False)
+ font_italic = self.style.get("font-italic", False)
+
+ font = FontFactory.get_font(
+ font_family, font_size, font_italic, font_bold
+ )
+
+ alignment_hor = self.style["alignment-horizontal"].lower()
+ if alignment_hor == "centre":
+ alignment_hor = "center"
+
+ drawer.multiline_text(
+ self.value_pos_start,
+ self.value,
+ font=font,
+ fill=font_color,
+ align=alignment_hor
+ )
diff --git a/pype/scripts/slates/slate_base/layer.py b/pype/scripts/slates/slate_base/layer.py
new file mode 100644
index 0000000000..ea3a3de53e
--- /dev/null
+++ b/pype/scripts/slates/slate_base/layer.py
@@ -0,0 +1,139 @@
+from .base import BaseObj
+
+
+class Layer(BaseObj):
+ obj_type = "layer"
+ available_parents = ["main_frame", "layer"]
+
+ # Direction can be 0=vertical/ 1=horizontal
+ def __init__(self, direction=0, *args, **kwargs):
+ super(Layer, self).__init__(*args, **kwargs)
+ self._direction = direction
+
+ @property
+ def item_pos_x(self):
+ if self.parent.obj_type == self.obj_type:
+ pos_x = self.parent.child_pos_x(self.id)
+ elif self.parent.obj_type == "main_frame":
+ pos_x = self._pos_x
+ else:
+ pos_x = self.parent.value_pos_x
+ return int(pos_x)
+
+ @property
+ def item_pos_y(self):
+ if self.parent.obj_type == self.obj_type:
+ pos_y = self.parent.child_pos_y(self.id)
+ elif self.parent.obj_type == "main_frame":
+ pos_y = self._pos_y
+ else:
+ pos_y = self.parent.value_pos_y
+
+ return int(pos_y)
+
+ @property
+ def direction(self):
+ if self._direction not in (0, 1):
+ self.log.warning((
+ "Direction of Layer must be 0 or 1 "
+                "(0 is vertical / 1 is horizontal)! Setting to 0."
+ ))
+ return 0
+ return self._direction
+
+ def child_pos_x(self, item_id):
+ pos_x = self.value_pos_x
+ alignment_hor = self.style["alignment-horizontal"].lower()
+
+ item = None
+ for id, _item in self.items.items():
+ if item_id == id:
+ item = _item
+ break
+
+ if self.direction == 1:
+ for id, _item in self.items.items():
+ if item_id == id:
+ break
+
+ pos_x += _item.width()
+ if _item.obj_type not in ["image", "placeholder"]:
+ pos_x += 1
+
+ else:
+ if alignment_hor in ["center", "centre"]:
+ pos_x += (self.content_width() - item.content_width()) / 2
+
+ elif alignment_hor == "right":
+ pos_x += self.content_width() - item.content_width()
+
+ else:
+ margin = self.style["margin"]
+ margin_left = self.style.get("margin-left") or margin
+ pos_x += margin_left
+
+ return int(pos_x)
+
+ def child_pos_y(self, item_id):
+ pos_y = self.value_pos_y
+        alignment_ver = self.style["alignment-vertical"].lower()
+
+ item = None
+ for id, _item in self.items.items():
+ if item_id == id:
+ item = _item
+ break
+
+ if self.direction != 1:
+ for id, item in self.items.items():
+ if item_id == id:
+ break
+ pos_y += item.height()
+ if item.obj_type not in ["image", "placeholder"]:
+ pos_y += 1
+
+ else:
+ if alignment_ver in ["center", "centre"]:
+ pos_y += (self.content_height() - item.content_height()) / 2
+
+ elif alignment_ver == "bottom":
+ pos_y += self.content_height() - item.content_height()
+
+ return int(pos_y)
+
+ def value_height(self):
+ height = 0
+ for item in self.items.values():
+ if self.direction == 1:
+ if height > item.height():
+ continue
+                # keep the tallest item as the layer height
+ height = item.height()
+ else:
+ height += item.height()
+
+ # TODO this is not right
+ min_height = self.style.get("min-height")
+ if min_height and min_height > height:
+ return min_height
+ return height
+
+ def value_width(self):
+ width = 0
+ for item in self.items.values():
+ if self.direction == 0:
+ if width > item.width():
+ continue
+                # keep the widest item as the layer width
+ width = item.width()
+ else:
+ width += item.width()
+
+ min_width = self.style.get("min-width")
+ if min_width and min_width > width:
+ return min_width
+ return width
+
+ def draw(self, image, drawer):
+ for item in self.items.values():
+ item.draw(image, drawer)
diff --git a/pype/scripts/slates/slate_base/lib.py b/pype/scripts/slates/slate_base/lib.py
new file mode 100644
index 0000000000..d9f8ad6d42
--- /dev/null
+++ b/pype/scripts/slates/slate_base/lib.py
@@ -0,0 +1,152 @@
+import os
+import json
+import logging
+try:
+ from queue import Queue
+except Exception:
+ from Queue import Queue
+
+from .main_frame import MainFrame
+from .layer import Layer
+from .items import (
+ ItemTable, ItemImage, ItemRectangle, ItemPlaceHolder
+)
+
+try:
+ from pypeapp.config import get_presets
+except Exception:
+ get_presets = dict
+
+log = logging.getLogger(__name__)
+
+
+RequiredSlateKeys = ["width", "height", "destination_path"]
+
+
+# TODO proper documentation
+def create_slates(
+ fill_data, slate_name=None, slate_data=None, data_output_json=None
+):
+    """Implementation for command-line execution.
+
+    Data for slates are taken from presets by default, which requires
+    `slate_name` to be entered. If `slate_data` is entered, it is used
+    instead.
+
+    `data_output_json` should be a path to a json file where collected data
+    will be stored.
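+
+    Example (illustrative only; the slate layout below is hypothetical):
+
+        slate_data = {
+            "width": 1920,
+            "height": 1080,
+            "destination_path": "/tmp/slate.png",
+            "items": [
+                {"type": "table", "values": [["Project", "{project[name]}"]]}
+            ]
+        }
+        create_slates(
+            {"project": {"name": "MyProject"}},
+            slate_data=slate_data,
+            data_output_json="/tmp/slate_data.json"
+        )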
+ """
+ if slate_data is None and slate_name is None:
+ raise TypeError(
+ "`create_slates` expects to enter data for slates or name"
+ " of slate preset."
+ )
+
+ elif slate_data is None:
+ slate_presets = (
+ get_presets()
+ .get("tools", {})
+ .get("slates")
+ ) or {}
+ slate_data = slate_presets.get(slate_name)
+ if slate_data is None:
+ raise ValueError(
+ "Preset name \"{}\" was not found in slate presets.".format(
+ slate_name
+ )
+ )
+
+ missing_keys = []
+ for key in RequiredSlateKeys:
+ if key not in slate_data:
+ missing_keys.append("`{}`".format(key))
+
+ if missing_keys:
+ log.error("Slate data of <{}> miss required keys: {}".format(
+ slate_name, ", ".join(missing_keys)
+ ))
+ return False
+
+ width = slate_data["width"]
+ height = slate_data["height"]
+ dst_path = slate_data["destination_path"]
+ style = slate_data.get("style") or {}
+
+ main = MainFrame(width, height, dst_path, fill_data, style=style)
+
+ load_queue = Queue()
+ for item in slate_data["items"]:
+ load_queue.put((item, main))
+
+ while not load_queue.empty():
+ item_data, parent = load_queue.get()
+
+ item_type = item_data["type"].lower()
+ item_style = item_data.get("style", {})
+ item_name = item_data.get("name")
+
+ pos_x = item_data.get("pos_x")
+ pos_y = item_data.get("pos_y")
+ if parent.obj_type != "main_frame":
+ if pos_x or pos_y:
+ # TODO logging
+ log.warning((
+                "You have specified `pos_x` and `pos_y` but they won't be"
+                " used. They apply only when the item's parent is `main_frame`."
+ ))
+ pos_x = None
+ pos_y = None
+
+ kwargs = {
+ "parent": parent,
+ "style": item_style,
+ "name": item_name,
+ "pos_x": pos_x,
+ "pos_y": pos_y
+ }
+
+ if item_type == "layer":
+ direction = item_data.get("direction", 0)
+ item_obj = Layer(direction, **kwargs)
+ for item in item_data.get("items", []):
+ load_queue.put((item, item_obj))
+
+ elif item_type == "table":
+ use_alternate_color = item_data.get("use_alternate_color", False)
+ values = item_data.get("values") or []
+ ItemTable(values, use_alternate_color, **kwargs)
+
+ elif item_type == "image":
+ path = item_data["path"]
+ ItemImage(path, **kwargs)
+
+ elif item_type == "rectangle":
+ ItemRectangle(**kwargs)
+
+ elif item_type == "placeholder":
+ path = item_data["path"]
+ ItemPlaceHolder(path, **kwargs)
+
+ else:
+ # TODO logging
+ log.warning(
+ "Not implemented object type `{}` - skipping".format(item_type)
+ )
+
+ main.draw()
+ log.debug("Slate creation finished")
+
+ if not data_output_json:
+ return
+
+ if not data_output_json.endswith(".json"):
+ raise ValueError("Output path must be .json file.")
+
+ data_output_json_dir = os.path.dirname(data_output_json)
+ if not os.path.exists(data_output_json_dir):
+ log.info("Creating folder \"{}\"".format(data_output_json_dir))
+ os.makedirs(data_output_json_dir)
+
+ output_data = main.collect_data()
+ with open(data_output_json, "w") as json_file:
+ json_file.write(json.dumps(output_data, indent=4))
+
+ log.info("Metadata collected in \"{}\".".format(data_output_json))
diff --git a/pype/scripts/slates/slate_base/main_frame.py b/pype/scripts/slates/slate_base/main_frame.py
new file mode 100644
index 0000000000..837e752aae
--- /dev/null
+++ b/pype/scripts/slates/slate_base/main_frame.py
@@ -0,0 +1,77 @@
+import os
+import re
+from PIL import Image, ImageDraw
+
+from .base import BaseObj
+
+
+class MainFrame(BaseObj):
+
+ obj_type = "main_frame"
+ available_parents = [None]
+
+ def __init__(
+        self, width, height, destination_path, fill_data=None, *args, **kwargs
+ ):
+ kwargs["parent"] = None
+ super(MainFrame, self).__init__(*args, **kwargs)
+ self._width = width
+ self._height = height
+ self.dst_path = destination_path
+        self._fill_data = fill_data or {}
+ self.fill_data_format()
+
+ def fill_data_format(self):
+ if re.match(self.fill_data_regex, self.dst_path):
+ self.dst_path = self.dst_path.format(**self.fill_data)
+
+ @property
+ def fill_data(self):
+ return self._fill_data
+
+ def value_width(self):
+ width = 0
+ for item in self.items.values():
+ width += item.width()
+ return width
+
+ def value_height(self):
+ height = 0
+ for item in self.items.values():
+ height += item.height()
+ return height
+
+ def width(self):
+ return self._width
+
+ def height(self):
+ return self._height
+
+ def draw(self, path=None):
+ dir_path = os.path.dirname(self.dst_path)
+ if not os.path.exists(dir_path):
+ os.makedirs(dir_path)
+
+ bg_color = self.style["bg-color"]
+ image = Image.new("RGB", (self.width(), self.height()), color=bg_color)
+ drawer = ImageDraw.Draw(image)
+ for item in self.items.values():
+ item.draw(image, drawer)
+
+ image.save(self.dst_path)
+ self.reset()
+
+ def collect_data(self):
+ output = {}
+ output["width"] = self.width()
+ output["height"] = self.height()
+ output["slate_path"] = self.dst_path
+
+ placeholders = self.find_item(obj_type="placeholder")
+ placeholders_data = []
+ for placeholder in placeholders:
+ placeholders_data.append(placeholder.collect_data())
+
+ output["placeholders"] = placeholders_data
+
+ return output
diff --git a/pype/services/adobe_communicator/__init__.py b/pype/services/adobe_communicator/__init__.py
new file mode 100644
index 0000000000..4110ab69b5
--- /dev/null
+++ b/pype/services/adobe_communicator/__init__.py
@@ -0,0 +1,5 @@
+from .adobe_comunicator import AdobeCommunicator
+
+
+def tray_init(tray_widget, main_widget):
+ return AdobeCommunicator()
diff --git a/pype/services/adobe_communicator/adobe_comunicator.py b/pype/services/adobe_communicator/adobe_comunicator.py
new file mode 100644
index 0000000000..d842955781
--- /dev/null
+++ b/pype/services/adobe_communicator/adobe_comunicator.py
@@ -0,0 +1,49 @@
+import os
+import pype
+from pypeapp import Logger
+from .lib import AdobeRestApi, PUBLISH_PATHS
+
+log = Logger().get_logger("AdobeCommunicator")
+
+
+class AdobeCommunicator:
+ rest_api_obj = None
+
+ def __init__(self):
+ self.rest_api_obj = None
+
+ # Add "adobecommunicator" publish paths
+ PUBLISH_PATHS.append(os.path.sep.join(
+ [pype.PLUGINS_DIR, "adobecommunicator", "publish"]
+ ))
+
+ def tray_start(self):
+ return
+
+ def process_modules(self, modules):
+ # Module requires RestApiServer
+ rest_api_module = modules.get("RestApiServer")
+ if not rest_api_module:
+ log.warning(
+ "AdobeCommunicator won't work without RestApiServer."
+ )
+ return
+
+ # Register statics url
+ pype_module_root = os.environ["PYPE_MODULE_ROOT"].replace("\\", "/")
+ static_path = "{}/pype/premiere/ppro".format(pype_module_root)
+ rest_api_module.register_statics("/ppro", static_path)
+
+ # Register rest api object for communication
+ self.rest_api_obj = AdobeRestApi()
+
+        # Add Ftrack publish path if Ftrack module is registered
+ if "FtrackModule" in modules:
+ PUBLISH_PATHS.append(os.path.sep.join(
+ [pype.PLUGINS_DIR, "ftrack", "publish"]
+ ))
+
+ log.debug((
+            f"Adobe Communicator registered PUBLISH_PATHS"
+            f" > `{PUBLISH_PATHS}`"
+ ))
diff --git a/pype/services/adobe_communicator/lib/__init__.py b/pype/services/adobe_communicator/lib/__init__.py
new file mode 100644
index 0000000000..23aee81275
--- /dev/null
+++ b/pype/services/adobe_communicator/lib/__init__.py
@@ -0,0 +1,8 @@
+from .io_nonsingleton import DbConnector
+from .rest_api import AdobeRestApi, PUBLISH_PATHS
+
+__all__ = [
+ "PUBLISH_PATHS",
+ "DbConnector",
+ "AdobeRestApi"
+]
diff --git a/pype/aport/io_nonsingleton.py b/pype/services/adobe_communicator/lib/io_nonsingleton.py
similarity index 93%
rename from pype/aport/io_nonsingleton.py
rename to pype/services/adobe_communicator/lib/io_nonsingleton.py
index ddda21a570..6380e4eb23 100644
--- a/pype/aport/io_nonsingleton.py
+++ b/pype/services/adobe_communicator/lib/io_nonsingleton.py
@@ -15,7 +15,7 @@ import functools
import contextlib
from avalon import schema
-import requests
+from avalon.vendor import requests
# Third-party dependencies
import pymongo
@@ -43,12 +43,25 @@ class DbConnector(object):
log = logging.getLogger(__name__)
def __init__(self):
- self.Session = {}
- self._mongo_client = None
- self._sentry_client = None
- self._sentry_logging_handler = None
- self._database = None
- self._is_installed = False
+ self.Session = {}
+ self._mongo_client = None
+ self._sentry_client = None
+ self._sentry_logging_handler = None
+ self._database = None
+ self._is_installed = False
+
+ def __getitem__(self, key):
+    # gives direct access to a collection without setting `active_table`
+ return self._database[key]
+
+ def __getattribute__(self, attr):
+    # not all methods of the PyMongo database are implemented here; with this
+    # fallback it is possible to use them too
+ try:
+ return super(DbConnector, self).__getattribute__(attr)
+ except AttributeError:
+ cur_proj = self.Session["AVALON_PROJECT"]
+ return self._database[cur_proj].__getattribute__(attr)
def install(self):
"""Establish a persistent connection to the database"""
@@ -192,7 +205,7 @@ class DbConnector(object):
) if os.getenv(item[0], item[1]) is not None
}
- Session["schema"] = "avalon-core:session-1.0"
+ Session["schema"] = "avalon-core:session-2.0"
try:
schema.validate(Session)
except schema.ValidationError as e:
@@ -320,6 +333,7 @@ class DbConnector(object):
@auto_reconnect
def find_one(self, filter, projection=None, sort=None):
assert isinstance(filter, dict), "filter must be "
+
return self._database[self.Session["AVALON_PROJECT"]].find_one(
filter=filter,
projection=projection,
diff --git a/pype/services/adobe_communicator/lib/publish.py b/pype/services/adobe_communicator/lib/publish.py
new file mode 100644
index 0000000000..2e7d993a60
--- /dev/null
+++ b/pype/services/adobe_communicator/lib/publish.py
@@ -0,0 +1,57 @@
+import os
+import sys
+import pype
+import importlib
+import pyblish.api
+import pyblish.util
+import avalon.api
+from avalon.tools import publish
+from pypeapp import Logger
+
+log = Logger().get_logger(__name__)
+
+
+def main(env):
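+    """Run a publish session for the host defined in `env`.
+
+    `env` is expected to behave like `os.environ` and to carry at least
+    `AVALON_APP`. `PUBLISH_PATHS` and `PYPE_PROJECT_PLUGINS` are optional
+    `os.pathsep`-joined lists of additional plugin paths; `AVALON_PROJECT`
+    is read from the process environment.
+    """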
+ # Registers pype's Global pyblish plugins
+ pype.install()
+
+    # Register Host (and its pyblish plugins)
+ host_name = env["AVALON_APP"]
+    # TODO not sure whether to use "pype." or "avalon." for host import
+ host_import_str = f"avalon.{host_name}"
+
+ try:
+ host_module = importlib.import_module(host_import_str)
+ except ModuleNotFoundError:
+ log.error((
+ f"Host \"{host_name}\" can't be imported."
+ f" Import string \"{host_import_str}\" failed."
+ ))
+ return False
+
+ avalon.api.install(host_module)
+
+ # Register additional paths
+ addition_paths_str = env.get("PUBLISH_PATHS") or ""
+ addition_paths = addition_paths_str.split(os.pathsep)
+ for path in addition_paths:
+ path = os.path.normpath(path)
+ if not os.path.exists(path):
+ continue
+
+ pyblish.api.register_plugin_path(path)
+
+ # Register project specific plugins
+ project_name = os.environ["AVALON_PROJECT"]
+ project_plugins_paths = env.get("PYPE_PROJECT_PLUGINS") or ""
+ for path in project_plugins_paths.split(os.pathsep):
+ plugin_path = os.path.join(path, project_name, "plugins")
+ if os.path.exists(plugin_path):
+ pyblish.api.register_plugin_path(plugin_path)
+
+ return publish.show()
+
+
+if __name__ == "__main__":
+ result = main(os.environ)
+ sys.exit(not bool(result))
diff --git a/pype/services/adobe_communicator/lib/rest_api.py b/pype/services/adobe_communicator/lib/rest_api.py
new file mode 100644
index 0000000000..2372c4ed20
--- /dev/null
+++ b/pype/services/adobe_communicator/lib/rest_api.py
@@ -0,0 +1,117 @@
+import os
+import sys
+import copy
+from pype.services.rest_api import RestApi, route, abort, CallbackResult
+from .io_nonsingleton import DbConnector
+from pypeapp import config, execute, Logger
+
+log = Logger().get_logger("AdobeCommunicator")
+
+CURRENT_DIR = os.path.dirname(__file__)
+PUBLISH_SCRIPT_PATH = os.path.join(CURRENT_DIR, "publish.py")
+
+PUBLISH_PATHS = []
+
+
+class AdobeRestApi(RestApi):
+ dbcon = DbConnector()
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.dbcon.install()
+
+ @route("/available", "/adobe")
+ def available(self):
+ return CallbackResult()
+
+ @route("/presets/", "/adobe")
+ def get_presets(self, request):
+ project_name = request.url_data["project_name"]
+ return CallbackResult(data=config.get_presets(project_name))
+
+ @route("/publish", "/adobe", "POST")
+ def publish(self, request):
+ """Triggers publishing script in subprocess.
+        """Triggers the publishing script in a subprocess.
+
+        The subprocess blocks this process, so while publishing is running it
+        is not possible to handle other requests and the main application may
+        freeze.
+ TODO: Freezing issue may be fixed with socket communication.
+
+ Example url:
+ http://localhost:8021/adobe/publish (POST)
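+
+        Example request body (values are illustrative; the keys are those
+        read by `_prepare_publish_environments`):
+
+            {
+                "project": "MyProject",
+                "asset": "sh010",
+                "task": "compositing",
+                "workdir": "C:/work/MyProject/sh010",
+                "AVALON_APP": "premiere",
+                "AVALON_APP_NAME": "premiere_2019",
+                "adobePublishJsonPathSend": "C:/temp/publish_send.json",
+                "adobePublishJsonPathGet": "C:/temp/publish_get.json"
+            }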
+ """
+ try:
+ publish_env = self._prepare_publish_environments(
+ request.request_data
+ )
+ except Exception as exc:
+ log.warning(
+ "Failed to prepare environments for publishing.",
+ exc_info=True
+ )
+ abort(400, str(exc))
+
+ output_data_path = publish_env["AC_PUBLISH_OUTPATH"]
+
+ log.info("Pyblish is running")
+ try:
+ # Trigger subprocess
+ # QUESTION should we check returncode?
+ returncode = execute(
+ [sys.executable, PUBLISH_SCRIPT_PATH],
+ env=publish_env
+ )
+
+ # Check if output file exists
+ if returncode != 0 or not os.path.exists(output_data_path):
+ abort(500, "Publishing failed")
+
+            log.info("Pyblish has stopped")
+
+ return CallbackResult(
+ data={"return_data_path": output_data_path}
+ )
+
+ except Exception:
+ log.warning("Publishing failed", exc_info=True)
+ abort(500, "Publishing failed")
+
+ def _prepare_publish_environments(self, data):
+ """Prepares environments based on request data."""
+ env = copy.deepcopy(os.environ)
+
+ project_name = data["project"]
+ asset_name = data["asset"]
+
+ project_doc = self.dbcon[project_name].find_one({
+ "type": "project"
+ })
+ av_asset = self.dbcon[project_name].find_one({
+ "type": "asset",
+ "name": asset_name
+ })
+ parents = av_asset["data"]["parents"]
+ hierarchy = ""
+ if parents:
+ hierarchy = "/".join(parents)
+
+ env["AVALON_PROJECT"] = project_name
+ env["AVALON_ASSET"] = asset_name
+ env["AVALON_TASK"] = data["task"]
+ env["AVALON_WORKDIR"] = data["workdir"]
+ env["AVALON_HIERARCHY"] = hierarchy
+ env["AVALON_PROJECTCODE"] = project_doc["data"].get("code", "")
+ env["AVALON_APP"] = data["AVALON_APP"]
+ env["AVALON_APP_NAME"] = data["AVALON_APP_NAME"]
+
+ env["PYBLISH_HOSTS"] = data["AVALON_APP"]
+
+ env["PUBLISH_PATHS"] = os.pathsep.join(PUBLISH_PATHS)
+
+ # Input and Output paths where source data and result data will be
+ # stored
+ env["AC_PUBLISH_INPATH"] = data["adobePublishJsonPathSend"]
+ env["AC_PUBLISH_OUTPATH"] = data["adobePublishJsonPathGet"]
+
+ return env
diff --git a/pype/services/rest_api/lib/factory.py b/pype/services/rest_api/lib/factory.py
index 2b94d498ff..39ea8474fa 100644
--- a/pype/services/rest_api/lib/factory.py
+++ b/pype/services/rest_api/lib/factory.py
@@ -3,6 +3,7 @@ import re
import inspect
import collections
from .lib import RestMethods
+from queue import Queue
from pypeapp import Logger
@@ -208,7 +209,7 @@ class _RestApiFactory:
"""
registered_objs = []
unprocessed_routes = []
- unprocessed_statics = []
+ unprocessed_statics = Queue()
prepared_routes = {
method: collections.defaultdict(list) for method in RestMethods
@@ -225,11 +226,6 @@ class _RestApiFactory:
self.unprocessed_routes.index(route)
)
- def _process_statics(self, item):
- return self.unprocessed_statics.pop(
- self.unprocessed_statics.index(item)
- )
-
def register_route(
self, path, callback, url_prefix, methods, strict_match
):
@@ -251,7 +247,7 @@ class _RestApiFactory:
def register_statics(self, item):
log.debug("Registering statics path \"{}\"".format(item))
- self.unprocessed_statics.append(item)
+ self.unprocessed_statics.put(item)
def _prepare_route(self, route):
"""Prepare data of registered callbacks for routes.
@@ -290,8 +286,9 @@ class _RestApiFactory:
methods has `__self__` or are defined in (it is expeted they
do not requise access to object)
"""
- for url_prefix, dir_path in self.unprocessed_statics:
- self._process_statics((url_prefix, dir_path))
+
+ while not self.unprocessed_statics.empty():
+ url_prefix, dir_path = self.unprocessed_statics.get()
dir_path = os.path.normpath(dir_path)
if not os.path.exists(dir_path):
log.warning(
@@ -314,7 +311,7 @@ class _RestApiFactory:
if not method.restapi:
continue
- for route in self.unprocessed_routes:
+ for route in list(self.unprocessed_routes):
callback = route["callback"]
if not (
callback.__qualname__ == method.__qualname__ and
@@ -330,7 +327,7 @@ class _RestApiFactory:
self._prepare_route(route)
break
- for route in self.unprocessed_routes:
+ for route in list(self.unprocessed_routes):
callback = route["callback"]
is_class_method = len(callback.__qualname__.split(".")) != 1
if is_class_method:
diff --git a/pype/services/rest_api/lib/handler.py b/pype/services/rest_api/lib/handler.py
index 732061c005..dc94808237 100644
--- a/pype/services/rest_api/lib/handler.py
+++ b/pype/services/rest_api/lib/handler.py
@@ -252,7 +252,14 @@ class Handler(http.server.SimpleHTTPRequestHandler):
content_length = int(cont_len)
in_data_str = self.rfile.read(content_length)
if in_data_str:
- in_data = json.loads(in_data_str)
+ try:
+ in_data = json.loads(in_data_str)
+ except Exception as e:
+                log.error("Invalid JSON received:")
+ log.error("-" * 80)
+ log.error(in_data_str)
+ log.error("-" * 80)
+                raise Exception("Invalid JSON received") from e
request_info = RequestInfo(
url_data=url_data,
diff --git a/pype/services/rest_api/rest_api.py b/pype/services/rest_api/rest_api.py
index 21c4f7f51f..d600a3a8a8 100644
--- a/pype/services/rest_api/rest_api.py
+++ b/pype/services/rest_api/rest_api.py
@@ -1,8 +1,9 @@
import os
import socket
-import socketserver
from Qt import QtCore
+from socketserver import ThreadingMixIn
+from http.server import HTTPServer
from .lib import RestApiFactory, Handler
from .base_class import route, register_statics
from pypeapp import config, Logger
@@ -10,6 +11,10 @@ from pypeapp import config, Logger
log = Logger().get_logger("RestApiServer")
+class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
+ pass
+
+
class RestApiServer:
"""Rest Api allows to access statics or callbacks with http requests.
@@ -180,7 +185,8 @@ class RestApiThread(QtCore.QThread):
"Running Rest Api server on URL:"
" \"http://localhost:{}\"".format(self.port)
)
- with socketserver.TCPServer(("", self.port), Handler) as httpd:
+
+ with ThreadingSimpleServer(("", self.port), Handler) as httpd:
while self.is_running:
httpd.handle_request()
except Exception:
diff --git a/pype/unreal/__init__.py b/pype/unreal/__init__.py
new file mode 100644
index 0000000000..bb8a765a43
--- /dev/null
+++ b/pype/unreal/__init__.py
@@ -0,0 +1,45 @@
+import os
+import logging
+
+from avalon import api as avalon
+from pyblish import api as pyblish
+
+logger = logging.getLogger("pype.unreal")
+
+PARENT_DIR = os.path.dirname(__file__)
+PACKAGE_DIR = os.path.dirname(PARENT_DIR)
+PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "unreal", "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "unreal", "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "unreal", "create")
+
+
+def install():
+ """Install Unreal configuration for Avalon."""
+ print("-=" * 40)
+ logo = '''.
+.
+ ____________
+ / \\ __ \\
+ \\ \\ \\/_\\ \\
+ \\ \\ _____/ ______
+ \\ \\ \\___// \\ \\
+ \\ \\____\\ \\ \\_____\\
+ \\/_____/ \\/______/ PYPE Club .
+.
+'''
+ print(logo)
+ print("installing Pype for Unreal ...")
+ print("-=" * 40)
+ logger.info("installing Pype for Unreal")
+ pyblish.register_plugin_path(str(PUBLISH_PATH))
+ avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
+ avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))
+
+
+def uninstall():
+ """Uninstall Unreal configuration for Avalon."""
+ pyblish.deregister_plugin_path(str(PUBLISH_PATH))
+ avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
+ avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))
diff --git a/pype/unreal/lib.py b/pype/unreal/lib.py
new file mode 100644
index 0000000000..0b049c8b1d
--- /dev/null
+++ b/pype/unreal/lib.py
@@ -0,0 +1,425 @@
+import sys
+import os
+import platform
+import json
+from distutils import dir_util
+import subprocess
+from pypeapp import config
+
+
+def get_engine_versions():
+ """
+ This will try to detect location and versions of installed Unreal Engine.
+ Location can be overridden by `UNREAL_ENGINE_LOCATION` environment
+ variable.
+
+ Returns:
+
+ dict: dictionary with version as a key and dir as value.
+
+ Example:
+
+        >>> get_engine_versions()
+ {
+ "4.23": "C:/Epic Games/UE_4.23",
+ "4.24": "C:/Epic Games/UE_4.24"
+ }
+ """
+ try:
+ engine_locations = {}
+ root, dirs, files = next(os.walk(os.environ["UNREAL_ENGINE_LOCATION"]))
+
+ for dir in dirs:
+ if dir.startswith("UE_"):
+ ver = dir.split("_")[1]
+ engine_locations[ver] = os.path.join(root, dir)
+ except KeyError:
+ # environment variable not set
+ pass
+ except OSError:
+        # specified directory doesn't exist
+ pass
+
+ # if we've got something, terminate autodetection process
+ if engine_locations:
+ return engine_locations
+
+ # else kick in platform specific detection
+ if platform.system().lower() == "windows":
+ return _win_get_engine_versions()
+ elif platform.system().lower() == "linux":
+ # on linux, there is no installation and getting Unreal Engine involves
+ # git clone. So we'll probably depend on `UNREAL_ENGINE_LOCATION`.
+ pass
+ elif platform.system().lower() == "darwin":
+ return _darwin_get_engine_version()
+
+ return {}
+
+
+def _win_get_engine_versions():
+ """
+ If engines are installed via Epic Games Launcher then there is:
+ `%PROGRAMDATA%/Epic/UnrealEngineLauncher/LauncherInstalled.dat`
+ This file is JSON file listing installed stuff, Unreal engines
+ are marked with `"AppName" = "UE_X.XX"`` like `UE_4.24`
+ """
+ install_json_path = os.path.join(
+ os.environ.get("PROGRAMDATA"),
+ "Epic",
+ "UnrealEngineLauncher",
+ "LauncherInstalled.dat",
+ )
+
+ return _parse_launcher_locations(install_json_path)
+
+
+def _darwin_get_engine_version() -> dict:
+ """
+ It works the same as on Windows, just JSON file location is different.
+ """
+ install_json_path = os.path.join(
+ os.environ.get("HOME"),
+ "Library",
+ "Application Support",
+ "Epic",
+ "UnrealEngineLauncher",
+ "LauncherInstalled.dat",
+ )
+
+ return _parse_launcher_locations(install_json_path)
+
+
+def _parse_launcher_locations(install_json_path: str) -> dict:
+ """
+ This will parse locations from json file.
+
+ :param install_json_path: path to `LauncherInstalled.dat`
+ :type install_json_path: str
+ :returns: returns dict with unreal engine versions as keys and
+ paths to those engine installations as value.
+ :rtype: dict
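+
+    Example `LauncherInstalled.dat` content (abbreviated and illustrative):
+
+        {
+            "InstallationList": [
+                {
+                    "InstallLocation": "C:/Epic Games/UE_4.24",
+                    "AppName": "UE_4.24"
+                }
+            ]
+        }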
+ """
+ engine_locations = {}
+ if os.path.isfile(install_json_path):
+ with open(install_json_path, "r") as ilf:
+ try:
+ install_data = json.load(ilf)
+ except json.JSONDecodeError:
+ raise Exception(
+                    "Invalid `LauncherInstalled.dat` file. "
+                    "Cannot determine Unreal Engine location."
+ )
+
+ for installation in install_data.get("InstallationList", []):
+ if installation.get("AppName").startswith("UE_"):
+ ver = installation.get("AppName").split("_")[1]
+ engine_locations[ver] = installation.get("InstallLocation")
+
+ return engine_locations
+
+
+def create_unreal_project(project_name: str,
+ ue_version: str,
+ pr_dir: str,
+ engine_path: str,
+ dev_mode: bool = False) -> None:
+ """
+    This will create a `.uproject` file at the specified location. As there
+    is no way I know of to create a project via the command line, this is the
+    easiest option.
+ Unreal project file is basically JSON file. If we find
+ `AVALON_UNREAL_PLUGIN` environment variable we assume this is location
+ of Avalon Integration Plugin and we copy its content to project folder
+ and enable this plugin.
+
+ :param project_name: project name
+ :type project_name: str
+ :param ue_version: unreal engine version (like 4.23)
+ :type ue_version: str
+ :param pr_dir: path to directory where project will be created
+ :type pr_dir: str
+ :param engine_path: Path to Unreal Engine installation
+ :type engine_path: str
+ :param dev_mode: Flag to trigger C++ style Unreal project needing
+ Visual Studio and other tools to compile plugins from
+ sources. This will trigger automatically if `Binaries`
+ directory is not found in plugin folders as this indicates
+ this is only source distribution of the plugin. Dev mode
+ is also set by preset file `unreal/project_setup.json` in
+ **PYPE_CONFIG**.
+ :type dev_mode: bool
+ :returns: None
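+
+    Example (paths and version are illustrative only):
+
+        >>> create_unreal_project(
+        ...     "MyProject", "4.24",
+        ...     "C:/projects/MyProject",
+        ...     "C:/Epic Games/UE_4.24"
+        ... )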
+ """
+ preset = config.get_presets()["unreal"]["project_setup"]
+
+ if os.path.isdir(os.environ.get("AVALON_UNREAL_PLUGIN", "")):
+ # copy plugin to correct path under project
+ plugins_path = os.path.join(pr_dir, "Plugins")
+ avalon_plugin_path = os.path.join(plugins_path, "Avalon")
+ if not os.path.isdir(avalon_plugin_path):
+ os.makedirs(avalon_plugin_path, exist_ok=True)
+ dir_util._path_created = {}
+ dir_util.copy_tree(os.environ.get("AVALON_UNREAL_PLUGIN"),
+ avalon_plugin_path)
+
+ if (not os.path.isdir(os.path.join(avalon_plugin_path, "Binaries"))
+                or not os.path.isdir(
+                    os.path.join(avalon_plugin_path, "Intermediate"))):
+ dev_mode = True
+
+ # data for project file
+ data = {
+ "FileVersion": 3,
+ "EngineAssociation": ue_version,
+ "Category": "",
+ "Description": "",
+ "Plugins": [
+ {"Name": "PythonScriptPlugin", "Enabled": True},
+ {"Name": "EditorScriptingUtilities", "Enabled": True},
+ {"Name": "Avalon", "Enabled": True}
+ ]
+ }
+
+ if preset["install_unreal_python_engine"]:
+ # If `PYPE_UNREAL_ENGINE_PYTHON_PLUGIN` is set, copy it from there to
+ # support offline installation.
+ # Otherwise clone UnrealEnginePython to Plugins directory
+ # https://github.com/20tab/UnrealEnginePython.git
+ uep_path = os.path.join(plugins_path, "UnrealEnginePython")
+ if os.environ.get("PYPE_UNREAL_ENGINE_PYTHON_PLUGIN"):
+
+ os.makedirs(uep_path, exist_ok=True)
+ dir_util._path_created = {}
+ dir_util.copy_tree(
+ os.environ.get("PYPE_UNREAL_ENGINE_PYTHON_PLUGIN"),
+ uep_path)
+ else:
+ # WARNING: this will trigger dev_mode, because we need to compile
+ # this plugin.
+ dev_mode = True
+ import git
+ git.Repo.clone_from(
+ "https://github.com/20tab/UnrealEnginePython.git",
+ uep_path)
+
+ data["Plugins"].append(
+ {"Name": "UnrealEnginePython", "Enabled": True})
+
+ if (not os.path.isdir(os.path.join(uep_path, "Binaries"))
+                or not os.path.isdir(os.path.join(uep_path, "Intermediate"))):
+ dev_mode = True
+
+ if dev_mode or preset["dev_mode"]:
+ # this will add project module and necessary source file to make it
+ # C++ project and to (hopefully) make Unreal Editor to compile all
+ # sources at start
+
+ data["Modules"] = [{
+ "Name": project_name,
+ "Type": "Runtime",
+ "LoadingPhase": "Default",
+ "AdditionalDependencies": ["Engine"],
+ }]
+
+ if preset["install_unreal_python_engine"]:
+ # now we need to fix python path in:
+ # `UnrealEnginePython.Build.cs`
+ # to point to our python
+ with open(os.path.join(
+ uep_path, "Source",
+ "UnrealEnginePython",
+ "UnrealEnginePython.Build.cs"), mode="r") as f:
+ build_file = f.read()
+
+ fix = build_file.replace(
+ 'private string pythonHome = "";',
+ 'private string pythonHome = "{}";'.format(
+ sys.base_prefix.replace("\\", "/")))
+
+ with open(os.path.join(
+ uep_path, "Source",
+ "UnrealEnginePython",
+ "UnrealEnginePython.Build.cs"), mode="w") as f:
+ f.write(fix)
+
+ # write project file
+ project_file = os.path.join(pr_dir, "{}.uproject".format(project_name))
+ with open(project_file, mode="w") as pf:
+ json.dump(data, pf, indent=4)
+
+ # ensure we have PySide installed in engine
+ # TODO: make it work for other platforms 🍎 🐧
+ if platform.system().lower() == "windows":
+ python_path = os.path.join(engine_path, "Engine", "Binaries",
+ "ThirdParty", "Python", "Win64",
+ "python.exe")
+
+ subprocess.run([python_path, "-m",
+ "pip", "install", "pyside"])
+
+ if dev_mode or preset["dev_mode"]:
+ _prepare_cpp_project(project_file, engine_path)
+
+
+def _prepare_cpp_project(project_file: str, engine_path: str) -> None:
+ """
+ This function will add source files needed for project to be
+ rebuild along with the avalon integration plugin.
+
+ There seems not to be automated way to do it from command line.
+ But there might be way to create at least those target and build files
+ by some generator. This needs more research as manually writing
+ those files is rather hackish. :skull_and_crossbones:
+
+ :param project_file: path to .uproject file
+ :type project_file: str
+ :param engine_path: path to unreal engine associated with project
+ :type engine_path: str
+ """
+
+ project_name = os.path.splitext(os.path.basename(project_file))[0]
+ project_dir = os.path.dirname(project_file)
+ targets_dir = os.path.join(project_dir, "Source")
+ sources_dir = os.path.join(targets_dir, project_name)
+
+ os.makedirs(sources_dir, exist_ok=True)
+ os.makedirs(os.path.join(project_dir, "Content"), exist_ok=True)
+
+ module_target = '''
+using UnrealBuildTool;
+using System.Collections.Generic;
+
+public class {0}Target : TargetRules
+{{
+ public {0}Target( TargetInfo Target) : base(Target)
+ {{
+ Type = TargetType.Game;
+ ExtraModuleNames.AddRange( new string[] {{ "{0}" }} );
+ }}
+}}
+'''.format(project_name)
+
+ editor_module_target = '''
+using UnrealBuildTool;
+using System.Collections.Generic;
+
+public class {0}EditorTarget : TargetRules
+{{
+ public {0}EditorTarget( TargetInfo Target) : base(Target)
+ {{
+ Type = TargetType.Editor;
+
+ ExtraModuleNames.AddRange( new string[] {{ "{0}" }} );
+ }}
+}}
+'''.format(project_name)
+
+ module_build = '''
+using UnrealBuildTool;
+public class {0} : ModuleRules
+{{
+ public {0}(ReadOnlyTargetRules Target) : base(Target)
+ {{
+ PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;
+ PublicDependencyModuleNames.AddRange(new string[] {{ "Core",
+ "CoreUObject", "Engine", "InputCore" }});
+ PrivateDependencyModuleNames.AddRange(new string[] {{ }});
+ }}
+}}
+'''.format(project_name)
+
+ module_cpp = '''
+#include "{0}.h"
+#include "Modules/ModuleManager.h"
+
+IMPLEMENT_PRIMARY_GAME_MODULE( FDefaultGameModuleImpl, {0}, "{0}" );
+'''.format(project_name)
+
+ module_header = '''
+#pragma once
+#include "CoreMinimal.h"
+'''
+
+ game_mode_cpp = '''
+#include "{0}GameModeBase.h"
+'''.format(project_name)
+
+ game_mode_h = '''
+#pragma once
+
+#include "CoreMinimal.h"
+#include "GameFramework/GameModeBase.h"
+#include "{0}GameModeBase.generated.h"
+
+UCLASS()
+class {1}_API A{0}GameModeBase : public AGameModeBase
+{{
+ GENERATED_BODY()
+}};
+'''.format(project_name, project_name.upper())
+
+ with open(os.path.join(
+ targets_dir, f"{project_name}.Target.cs"), mode="w") as f:
+ f.write(module_target)
+
+ with open(os.path.join(
+ targets_dir, f"{project_name}Editor.Target.cs"), mode="w") as f:
+ f.write(editor_module_target)
+
+ with open(os.path.join(
+ sources_dir, f"{project_name}.Build.cs"), mode="w") as f:
+ f.write(module_build)
+
+ with open(os.path.join(
+ sources_dir, f"{project_name}.cpp"), mode="w") as f:
+ f.write(module_cpp)
+
+ with open(os.path.join(
+ sources_dir, f"{project_name}.h"), mode="w") as f:
+ f.write(module_header)
+
+ with open(os.path.join(
+ sources_dir, f"{project_name}GameModeBase.cpp"), mode="w") as f:
+ f.write(game_mode_cpp)
+
+ with open(os.path.join(
+ sources_dir, f"{project_name}GameModeBase.h"), mode="w") as f:
+ f.write(game_mode_h)
+
+ if platform.system().lower() == "windows":
+ u_build_tool = (f"{engine_path}/Engine/Binaries/DotNET/"
+ "UnrealBuildTool.exe")
+ u_header_tool = (f"{engine_path}/Engine/Binaries/Win64/"
+ f"UnrealHeaderTool.exe")
+ elif platform.system().lower() == "linux":
+ # WARNING: there is no UnrealBuildTool on linux?
+ u_build_tool = ""
+ u_header_tool = ""
+ elif platform.system().lower() == "darwin":
+ # WARNING: there is no UnrealBuildTool on Mac?
+ u_build_tool = ""
+ u_header_tool = ""
+
+ u_build_tool = u_build_tool.replace("\\", "/")
+ u_header_tool = u_header_tool.replace("\\", "/")
+
+ command1 = [u_build_tool, "-projectfiles", f"-project={project_file}",
+ "-progress"]
+
+ subprocess.run(command1)
+
+    command2 = [u_build_tool, f"-ModuleWithSuffix={project_name},3555",
+                "Win64", "Development", "-TargetType=Editor",
+                f'-Project="{project_file}"', f'"{project_file}"',
+                "-IgnoreJunk"]
+
+ subprocess.run(command2)
+
+ """
+ uhtmanifest = os.path.join(os.path.dirname(project_file),
+ f"{project_name}.uhtmanifest")
+
+ command3 = [u_header_tool, f'"{project_file}"', f'"{uhtmanifest}"',
+ "-Unattended", "-WarningsAsErrors", "-installed"]
+
+ subprocess.run(command3)
+ """
diff --git a/pype/unreal/plugin.py b/pype/unreal/plugin.py
new file mode 100644
index 0000000000..0c00eb77d6
--- /dev/null
+++ b/pype/unreal/plugin.py
@@ -0,0 +1,11 @@
+from avalon import api
+
+
+class Creator(api.Creator):
+ """This serves as skeleton for future Pype specific functionality"""
+ pass
+
+
+class Loader(api.Loader):
+ """This serves as skeleton for future Pype specific functionality"""
+ pass
diff --git a/pype/vendor/pysync.py b/pype/vendor/pysync.py
new file mode 100644
index 0000000000..14a6dda34c
--- /dev/null
+++ b/pype/vendor/pysync.py
@@ -0,0 +1,216 @@
+#!/usr/local/bin/python3
+# https://github.com/snullp/pySync/blob/master/pySync.py
+
+import sys
+import shutil
+import os
+import time
+import configparser
+from os.path import (
+ getsize,
+ getmtime,
+ isfile,
+ isdir,
+ join,
+ abspath,
+ expanduser,
+ realpath
+)
+import logging
+
+log = logging.getLogger(__name__)
+
+ignoreFiles = ("Thumbs.db", ".DS_Store")
+
+# this feature is not yet implemented
+ignorePaths = []
+
+if os.name == 'nt':
+ # msvcrt can't function correctly in IDLE
+ if 'idlelib.run' in sys.modules:
+ print("Please don't run this script in IDLE.")
+ sys.exit(0)
+ import msvcrt
+
+ def flush_input(str, set=None):
+ if not set:
+ while msvcrt.kbhit():
+ ch = msvcrt.getch()
+ if ch == '\xff':
+ print("msvcrt is broken, this is weird.")
+ sys.exit(0)
+ return input(str)
+ else:
+ return set
+else:
+ import select
+
+ def flush_input(str, set=None):
+ if not set:
+ while len(select.select([sys.stdin.fileno()], [], [], 0.0)[0]) > 0:
+ os.read(sys.stdin.fileno(), 4096)
+ return input(str)
+ else:
+ return set
+
+
+def compare(fa, fb, options_input=[]):
+ if isfile(fa) == isfile(fb):
+ if isdir(fa):
+ walktree(fa, fb, options_input)
+ elif isfile(fa):
+ if getsize(fa) != getsize(fb) \
+ or int(getmtime(fa)) != int(getmtime(fb)):
+ log.info(str((fa, ': size=', getsize(fa), 'mtime=',
+ time.asctime(time.localtime(getmtime(fa))))))
+ log.info(str((fb, ': size=', getsize(fb), 'mtime=',
+ time.asctime(time.localtime(getmtime(fb))))))
+ if getmtime(fa) > getmtime(fb):
+ act = '>'
+ else:
+ act = '<'
+
+            set = next(
+                (i for i in options_input if i in [">", "<"]), None)
+
+ s = flush_input('What to do?(>,<,r,n)[' + act + ']', set=set)
+ if len(s) > 0:
+ act = s[0]
+ if act == '>':
+ shutil.copy2(fa, fb)
+ elif act == '<':
+ shutil.copy2(fb, fa)
+ elif act == 'r':
+ if isdir(fa):
+ shutil.rmtree(fa)
+ elif isfile(fa):
+ os.remove(fa)
+ else:
+ log.info(str(('Remove: Skipping', fa)))
+ if isdir(fb):
+ shutil.rmtree(fb)
+ elif isfile(fb):
+ os.remove(fb)
+ else:
+ log.info(str(('Remove: Skipping', fb)))
+
+ else:
+ log.debug(str(('Compare: Skipping non-dir and non-file', fa)))
+ else:
+ log.error(str(('Error:', fa, ',', fb, 'have different file type')))
+
+
+def copy(fa, fb, options_input=[]):
+ set = [i for i in options_input if i in ["y"]][0]
+    set = next((i for i in options_input if i in ["y"]), None)
+ if len(s) > 0:
+ act = s[0]
+ else:
+ act = 'y'
+ if act == 'y':
+ if isdir(fa):
+ shutil.copytree(fa, fb)
+ elif isfile(fa):
+ shutil.copy2(fa, fb)
+ else:
+ log.debug(str(('Copy: Skipping ', fa)))
+ elif act == 'r':
+ if isdir(fa):
+ shutil.rmtree(fa)
+ elif isfile(fa):
+ os.remove(fa)
+ else:
+ log.debug(str(('Remove: Skipping ', fa)))
+
+
+stoentry = []
+tarentry = []
+
+
+def walktree(source, target, options_input=[]):
+ srclist = os.listdir(source)
+ tarlist = os.listdir(target)
+ if '!sync' in srclist:
+ return
+ if '!sync' in tarlist:
+ return
+ # files in source dir...
+ for f in srclist:
+ if f in ignoreFiles:
+ continue
+ spath = join(source, f)
+ tpath = join(target, f)
+ if spath in ignorePaths:
+ continue
+ if spath in stoentry:
+ # just in case target also have this one
+ if f in tarlist:
+ del tarlist[tarlist.index(f)]
+ continue
+
+ # if also exists in target dir
+ if f in tarlist:
+ del tarlist[tarlist.index(f)]
+ compare(spath, tpath, options_input)
+
+ # exists in source dir only
+ else:
+ copy(spath, tpath, options_input)
+
+ # exists in target dir only
+ set = [i for i in options_input if i in ["<"]]
+
+ for f in tarlist:
+ if f in ignoreFiles:
+ continue
+ spath = join(source, f)
+ tpath = join(target, f)
+ if tpath in ignorePaths:
+ continue
+ if tpath in tarentry:
+ continue
+ if set:
+ copy(tpath, spath, options_input)
+ else:
+ print("REMOVING: {}".format(f))
+ if os.path.isdir(tpath):
+ shutil.rmtree(tpath)
+ else:
+ os.remove(tpath)
+
+
+if __name__ == '__main__':
+ stoconf = configparser.RawConfigParser()
+ tarconf = configparser.RawConfigParser()
+ stoconf.read("pySync.ini")
+ tarconf.read(expanduser("~/.pysync"))
+ stoname = stoconf.sections()[0]
+ tarname = tarconf.sections()[0]
+
+ # calculate storage's base folder
+ if stoconf.has_option(stoname, 'BASE'):
+ stobase = abspath(stoconf.get(stoname, 'BASE'))
+ stoconf.remove_option(stoname, 'BASE')
+ else:
+ stobase = os.getcwd()
+
+ # same, for target's base folder
+ if tarconf.has_option(tarname, 'BASE'):
+ tarbase = abspath(tarconf.get(tarname, 'BASE'))
+ tarconf.remove_option(tarname, 'BASE')
+ else:
+ tarbase = expanduser('~/')
+
+ print("Syncing between", stoname, "and", tarname)
+ sto_content = {x: realpath(join(stobase, stoconf.get(stoname, x)))
+ for x in stoconf.options(stoname)}
+ tar_content = {x: realpath(join(tarbase, tarconf.get(tarname, x)))
+ for x in tarconf.options(tarname)}
+ stoentry = [sto_content[x] for x in sto_content]
+ tarentry = [tar_content[x] for x in tar_content]
+
+ for folder in sto_content:
+ if folder in tar_content:
+ print('Processing', folder)
+            walktree(sto_content[folder], tar_content[folder])
+ print("Done.")
diff --git a/pype/version.py b/pype/version.py
new file mode 100644
index 0000000000..892994aa6c
--- /dev/null
+++ b/pype/version.py
@@ -0,0 +1 @@
+__version__ = "2.8.0"
diff --git a/res/app_icons/ue4.png b/res/app_icons/ue4.png
new file mode 100644
index 0000000000..39201de664
Binary files /dev/null and b/res/app_icons/ue4.png differ
diff --git a/schema/master_version-1.0.json b/schema/master_version-1.0.json
new file mode 100644
index 0000000000..9dff570b3a
--- /dev/null
+++ b/schema/master_version-1.0.json
@@ -0,0 +1,44 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "title": "pype:master_version-1.0",
+ "description": "Master version of asset",
+
+ "type": "object",
+
+ "additionalProperties": true,
+
+ "required": [
+ "version_id",
+ "schema",
+ "type",
+ "parent"
+ ],
+
+ "properties": {
+ "_id": {
+      "description": "Document's id (the database will create one if not entered)",
+ "example": "ObjectId(592c33475f8c1b064c4d1696)"
+ },
+ "version_id": {
+ "description": "The version ID from which it was created",
+ "example": "ObjectId(592c33475f8c1b064c4d1695)"
+ },
+ "schema": {
+ "description": "The schema associated with this document",
+ "type": "string",
+ "enum": ["avalon-core:master_version-1.0", "pype:master_version-1.0"],
+ "example": "pype:master_version-1.0"
+ },
+ "type": {
+ "description": "The type of document",
+ "type": "string",
+ "enum": ["master_version"],
+ "example": "master_version"
+ },
+ "parent": {
+ "description": "Unique identifier to parent document",
+ "example": "ObjectId(592c33475f8c1b064c4d1697)"
+ }
+ }
+}
diff --git a/setup/nuke/nuke_path/menu.py b/setup/nuke/nuke_path/menu.py
index 15702fa364..be4f39b542 100644
--- a/setup/nuke/nuke_path/menu.py
+++ b/setup/nuke/nuke_path/menu.py
@@ -4,8 +4,8 @@ import KnobScripter
from pype.nuke.lib import (
writes_version_sync,
- onScriptLoad,
- checkInventoryVersions
+ on_script_load,
+ check_inventory_versions
)
import nuke
@@ -15,9 +15,9 @@ log = Logger().get_logger(__name__, "nuke")
# nuke.addOnScriptSave(writes_version_sync)
-nuke.addOnScriptSave(onScriptLoad)
-nuke.addOnScriptLoad(checkInventoryVersions)
-nuke.addOnScriptSave(checkInventoryVersions)
+nuke.addOnScriptSave(on_script_load)
+nuke.addOnScriptLoad(check_inventory_versions)
+nuke.addOnScriptSave(check_inventory_versions)
# nuke.addOnScriptSave(writes_version_sync)
log.info('Automatic syncing of write file knob to script version')