mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 12:54:40 +01:00
Merged environments into master
This commit is contained in:
commit
166ee38126
7 changed files with 612 additions and 0 deletions
46
colorbleed/plugins/maya/publish/_validate_node_ids.py
Normal file
46
colorbleed/plugins/maya/publish/_validate_node_ids.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateNodeIds(pyblish.api.InstancePlugin):
    """Validate nodes have colorbleed id attributes

    All look sets should have id attributes, and each id must be unique.
    """

    label = 'Node Id Attributes'
    families = ['colorbleed.look', 'colorbleed.model']
    hosts = ['maya']
    order = colorbleed.api.ValidatePipelineOrder
    actions = [colorbleed.api.SelectInvalidAction,
               colorbleed.api.GenerateUUIDsOnInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return set members lacking a `cbId` or sharing one with another node.

        Args:
            instance: pyblish instance; reads ``instance.data["setMembers"]``.

        Returns:
            list: node names considered invalid.
        """
        import maya.cmds as cmds

        nodes = instance.data["setMembers"]

        # Ensure all nodes have a (unique) cbId
        data_id = {}
        invalid = []
        for node in nodes:
            try:
                uuid = cmds.getAttr("{}.cbId".format(node))
                # Fix: test for a duplicate *before* registering this node.
                # The original stored the id first, so the membership test
                # always succeeded and every node was reported invalid.
                if uuid in data_id:
                    invalid.append(node)
                else:
                    data_id[uuid] = node
            except RuntimeError:
                # Fix: getAttr raises RuntimeError when `.cbId` is missing;
                # a node without an id violates this validator's contract
                # (and GenerateUUIDsOnInvalidAction exists to repair it),
                # so flag it instead of silently skipping.
                invalid.append(node)

        return invalid

    def process(self, instance):
        """Process all meshes"""

        invalid = self.get_invalid(instance)

        if invalid:
            # Fix: the original concatenated "invalid" + "asset" without a
            # separating space ("invalidasset IDs").
            raise RuntimeError("Nodes found with invalid "
                               "asset IDs: {0}".format(invalid))
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
import re
|
||||
|
||||
import pyblish.api
|
||||
import colorbleed.api
|
||||
|
||||
|
||||
class ValidateFileNameConvention(pyblish.api.InstancePlugin):
    """Validate that instance member names follow the naming convention.

    Names must start with ``[a-zA-Z]+_[A-Z]{3}``, e.g. ``myAsset_GEO``.
    """

    # Fix: the label was empty, which renders as a blank entry in the UI.
    label = "File Name Convention"
    families = ["colorbleed.lookdev"]
    # Fix: pyblish filters plug-ins on `hosts` (plural); the original
    # `host` attribute was ignored, so this ran in every host.
    hosts = ["maya"]
    optional = True

    order = pyblish.api.ValidatorOrder
    actions = [colorbleed.api.SelectInvalidAction]

    @staticmethod
    def get_invalid(instance):
        """Return members of `instance` whose names break the convention."""

        invalid = []
        # todo: change pattern to company standard
        pattern = re.compile("[a-zA-Z]+_[A-Z]{3}")

        nodes = list(instance)
        for node in nodes:
            # `match` anchors at the start of the name only; a valid prefix
            # is sufficient.
            match = pattern.match(node)
            if not match:
                invalid.append(node)

        return invalid

    def process(self, instance):
        """Fail the instance when any member breaks the naming convention."""

        invalid = self.get_invalid(instance)
        if invalid:
            # Fix: "Failed noted" -> "Failed nodes" in the message.
            self.log.error("Found invalid naming convention. Failed nodes:\n"
                           "%s" % invalid)
            # Fix: a validator must raise to actually fail validation;
            # logging alone let invalid instances pass through untouched.
            raise RuntimeError("Invalid node names: %s" % invalid)
|
||||
94
colorbleed/plugins/publish/collect_resource_destination.py
Normal file
94
colorbleed/plugins/publish/collect_resource_destination.py
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
import pyblish.api
|
||||
import os
|
||||
|
||||
import avalon.io as io
|
||||
|
||||
|
||||
class CollectResourceDestination(pyblish.api.InstancePlugin):
    """Collect the publish destination for an instance's resource files.

    Computes an assumed publish template path, derives a "resources" folder
    next to it, assigns each resource a destination inside that folder and
    queues ``[source, destination]`` pairs in ``instance.data["transfers"]``.
    """

    label = "Collect Resource Destination"
    # Runs late in collection so the instance data it reads is available.
    order = pyblish.api.CollectorOrder + 0.499

    def process(self, instance):

        self.create_destination_template(instance)

        template_data = instance.data["assumedTemplateData"]
        template = instance.data["template"]

        mock_template = template.format(**template_data)

        # For now assume resources end up in a "resources" folder in the
        # published folder
        mock_destination = os.path.join(os.path.dirname(mock_template),
                                        "resources")

        # Clean the path
        mock_destination = os.path.abspath(os.path.normpath(mock_destination))

        # Define resource destination and transfers
        resources = instance.data.get("resources", list())
        transfers = instance.data.get("transfers", list())
        for resource in resources:

            # Add destination to the resource
            source_filename = os.path.basename(resource["source"])
            destination = os.path.join(mock_destination, source_filename)
            resource['destination'] = destination

            # Collect transfers for the individual files of the resource
            # e.g. all individual files of a cache or UDIM textures.
            files = resource['files']
            for fsrc in files:
                fname = os.path.basename(fsrc)
                fdest = os.path.join(mock_destination, fname)
                transfers.append([fsrc, fdest])

        instance.data["resources"] = resources
        instance.data["transfers"] = transfers

    def create_destination_template(self, instance):
        """Create a filepath based on the current data available

        Example template:
            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
            {subset}.{representation}

        Args:
            instance: the instance to publish

        Returns:
            None: stores results on ``instance.data`` under
                "assumedTemplateData" and "template".
        """

        # get all the stuff from the database
        subset_name = instance.data["subset"]
        project_name = os.environ["AVALON_PROJECT"]

        project = io.find_one({"type": "project",
                               "name": project_name},
                              projection={"config": True})
        template = project["config"]["template"]["publish"]

        # NOTE(review): this query matches on subset name alone; a subset
        # with the same name under a different asset could be returned —
        # confirm whether it should also filter on the asset's parent id.
        subset = io.find_one({"type": "subset",
                              "name": subset_name})

        # assume there is no version yet, we start at `1`
        version_number = 1
        if subset is not None:
            version = io.find_one({"type": "version",
                                   "parent": subset["_id"]},
                                  sort=[("name", -1)])
            # Fix: guard against a subset with no versions yet; the original
            # assumed one always exists and raised TypeError on None.
            if version is not None:
                version_number += version["name"]

        template_data = {"root": os.environ["AVALON_ROOT"],
                         "project": project_name,
                         "silo": os.environ["AVALON_SILO"],
                         "asset": instance.data["asset"],
                         "subset": subset_name,
                         "version": version_number,
                         # placeholder; real representation is set later
                         "representation": "TEMP"}

        instance.data["assumedTemplateData"] = template_data
        instance.data["template"] = template
|
||||
302
colorbleed/plugins/publish/integrate.py
Normal file
302
colorbleed/plugins/publish/integrate.py
Normal file
|
|
@ -0,0 +1,302 @@
|
|||
import os
|
||||
import logging
|
||||
import shutil
|
||||
|
||||
import errno
|
||||
import pyblish.api
|
||||
from avalon import api, io
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IntegrateAsset(pyblish.api.InstancePlugin):
    """Resolve any dependency issues

    This plug-in resolves any paths which, if not updated might break
    the published file.

    The order of families is important, when working with lookdev you want to
    first publish the texture, update the texture paths in the nodes and then
    publish the shading network. Same goes for file dependent assets.
    """

    label = "Intergrate Asset"
    order = pyblish.api.IntegratorOrder
    families = ["colorbleed.model",
                "colorbleed.rig",
                "colorbleed.animation",
                "colorbleed.camera",
                "colorbleed.lookdev",
                "colorbleed.texture",
                "colorbleed.historyLookdev",
                "colorbleed.group"]

    def process(self, instance):
        """Register the instance in the database, transfer its files to the
        publish location and remove the temporary staging directory."""

        self.log.info("Integrating Asset in to the database ...")

        self.register(instance)
        self.intergrate(instance)

        # Staging is temporary; once transfers are done it can be deleted.
        self.log.info("Removing temporary files and folders ...")
        stagingdir = instance.data["stagingDir"]
        shutil.rmtree(stagingdir)

    def register(self, instance):
        """Assemble and insert the version document and build representations.

        Queues ``[src, dst]`` file pairs into ``instance.data["transfers"]``
        and stores the representation documents in
        ``instance.data["representations"]`` for `intergrate` to insert.

        Args:
            instance: the pyblish instance being published.

        Returns:
            list: the assembled representation documents.
        """

        # Required environment variables
        PROJECT = os.environ["AVALON_PROJECT"]
        ASSET = instance.data.get("asset") or os.environ["AVALON_ASSET"]
        SILO = os.environ["AVALON_SILO"]
        # May be None (getenv); `create_version` filters out None locations.
        LOCATION = os.getenv("AVALON_LOCATION")

        # todo(marcus): avoid hardcoding labels in the integrator
        representation_labels = {".ma": "Maya Ascii",
                                 ".source": "Original source file",
                                 ".abc": "Alembic"}

        context = instance.context
        # Atomicity
        #
        # Guarantee atomic publishes - each asset contains
        # an identical set of members.
        #     __
        #    /     o
        #   /       \
        #  |    o    |
        #   \       /
        #    o   __/
        #
        # Abort integration entirely if any earlier plug-in failed.
        assert all(result["success"] for result in context.data["results"]), (
            "Atomicity not held, aborting.")

        # Assemble
        #
        #       |
        #       v
        #  --->   <----
        #       ^
        #       |
        #
        stagingdir = instance.data.get("stagingDir")
        assert stagingdir, ("Incomplete instance \"%s\": "
                            "Missing reference to staging area." % instance)

        # extra check if stagingDir actually exists and is available

        self.log.debug("Establishing staging directory @ %s" % stagingdir)

        project = io.find_one({"type": "project"},
                              projection={"config.template.publish": True})

        asset = io.find_one({"type": "asset",
                             "name": ASSET,
                             "parent": project["_id"]})

        assert all([project, asset]), ("Could not find current project or "
                                       "asset '%s'" % ASSET)

        subset = self.get_subset(asset, instance)

        # get next version
        latest_version = io.find_one({"type": "version",
                                      "parent": subset["_id"]},
                                     {"name": True},
                                     sort=[("name", -1)])

        next_version = 1
        if latest_version is not None:
            next_version += latest_version["name"]

        self.log.debug("Next version: %i" % next_version)

        version_data = self.create_version_data(context, instance)
        version = self.create_version(subset=subset,
                                      version_number=next_version,
                                      locations=[LOCATION],
                                      data=version_data)

        self.log.debug("Creating version ...")
        version_id = io.insert_one(version).inserted_id

        # Write to disk
        #          _
        #         | |
        #        _| |_
        #    ____\   /
        #   |\    \ / \
        #   \ \    v   \
        #    \ \________.
        #     \|________|
        #
        root = api.registered_root()
        template_data = {"root": root,
                         "project": PROJECT,
                         "silo": SILO,
                         "asset": ASSET,
                         "subset": subset["name"],
                         "version": version["name"]}

        template_publish = project["config"]["template"]["publish"]

        representations = []
        staging_content = os.listdir(stagingdir)
        # One representation per file found in the staging directory.
        for v, fname in enumerate(staging_content):

            name, ext = os.path.splitext(fname)
            # Representation name is the extension without its leading dot.
            template_data["representation"] = ext[1:]

            src = os.path.join(stagingdir, fname)
            dst = template_publish.format(**template_data)

            # Backwards compatibility
            if fname == ".metadata.json":
                dirname = os.path.dirname(dst)
                dst = os.path.join(dirname, fname)

            # copy source to destination (library)
            instance.data["transfers"].append([src, dst])

            representation = {
                "schema": "avalon-core:representation-2.0",
                "type": "representation",
                "parent": version_id,
                "name": ext[1:],
                "data": {"label": representation_labels.get(ext)},
                "dependencies": instance.data.get("dependencies", "").split(),

                # Imprint shortcut to context
                # for performance reasons.
                "context": {
                    "project": PROJECT,
                    "asset": ASSET,
                    "silo": SILO,
                    "subset": subset["name"],
                    "version": version["name"],
                    "representation": ext[1:]
                }
            }
            representations.append(representation)

        # store data for database and source / destinations
        instance.data["representations"] = representations

        return representations

    def intergrate(self, instance):
        """Register the representations and move the files

        Through the stored `representations` and `transfers`

        Args:
            instance: the instance to integrate
        """

        # get needed data
        traffic = instance.data["transfers"]
        representations = instance.data["representations"]

        self.log.info("Registering {} items".format(len(representations)))
        io.insert_many(representations)

        # moving files
        for src, dest in traffic:
            self.log.info("Copying file .. {} -> {}".format(src, dest))
            self.copy_file(src, dest)

    def copy_file(self, src, dst):
        """ Copy given source to destination

        Creates the destination directory when needed.

        Arguments:
            src (str): the source file which needs to be copied
            dst (str): the destination of the source file
        Returns:
            None
        """

        dirname = os.path.dirname(dst)
        try:
            os.makedirs(dirname)
        except OSError as e:
            # An already-existing destination directory is fine;
            # any other OS error is unexpected and re-raised.
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                raise

        shutil.copy(src, dst)

    def get_subset(self, asset, instance):
        """Return the subset document for the instance, creating it under
        `asset` when it does not exist yet."""

        subset = io.find_one({"type": "subset",
                              "parent": asset["_id"],
                              "name": instance.data["subset"]})

        if subset is None:
            subset_name = instance.data["subset"]
            self.log.info("Subset '%s' not found, creating.." % subset_name)

            _id = io.insert_one({
                "schema": "avalon-core:subset-2.0",
                "type": "subset",
                "name": subset_name,
                "data": {},
                "parent": asset["_id"]
            }).inserted_id

            # Re-fetch so the returned document matches the stored shape.
            subset = io.find_one({"_id": _id})

        return subset

    def create_version(self, subset, version_number, locations, data=None):
        """Assemble (but do not insert) a version document.

        Arguments:
            subset (dict): the registered subset of the asset
            version_number (int): the version number
            locations (list): the currently registered locations
            data (dict, optional): extra data to store on the version

        Returns:
            dict: the version document.
        """
        # Imprint currently registered location (drop None entries, e.g.
        # when AVALON_LOCATION is unset).
        version_locations = [location for location in locations if
                             location is not None]

        return {"schema": "avalon-core:version-2.0",
                "type": "version",
                "parent": subset["_id"],
                "name": version_number,
                "locations": version_locations,
                "data": data}

    def create_version_data(self, context, instance):
        """Create the data collection for the version

        Args:
            context: the current context
            instance: the current instance being published

        Returns:
            dict: the required information with instance.data as key
        """

        families = []
        current_families = instance.data.get("families", list())
        instance_family = instance.data.get("family", None)

        families += current_families
        if instance_family is not None:
            families.append(instance_family)

        # create relative source path for DB
        relative_path = os.path.relpath(context.data["currentFile"],
                                        api.registered_root())
        # Store with a {root} placeholder and forward slashes so the path
        # stays portable across platforms/mounts.
        source = os.path.join("{root}", relative_path).replace("\\", "/")

        version_data = {"families": families,
                        "time": context.data["time"],
                        "author": context.data["user"],
                        "source": source,
                        "comment": context.data.get("comment")}

        return dict(instance.data, **version_data)
|
||||
70
maya_environment.bat
Normal file
70
maya_environment.bat
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
@echo OFF

echo Entering Maya2016 environment...

:: Environment: Maya
set CB_MAYA_VERSION=2016
set CB_MAYA_SHARED=%CB_APP_SHARED%\maya_shared\%CB_MAYA_VERSION%

:: Fix: guard the prerequisite variable. CB_MAYA_SHARED is built from
:: CB_APP_SHARED just above and therefore can never be empty, so the old
:: guard never fired even when CB_APP_SHARED was missing.
if "%CB_APP_SHARED%" == "" (
    echo Error: "CB_APP_SHARED" not set
    goto :eof
)


:: Colorbleed Maya
set PYTHONPATH=%CB_PIPELINE%\git\cbMayaScripts;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\inventory\python;%PYTHONPATH%

:: Maya shared
:: Fix: the original appended %MAYA_PLUGIN_PATH% (missing underscore),
:: which silently discarded any pre-existing MAYA_PLUG_IN_PATH value.
set MAYA_PLUG_IN_PATH=%CB_MAYA_SHARED%\plugins;%MAYA_PLUG_IN_PATH%
set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\prefs\shelves;%MAYA_SHELF_PATH%
set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts;%MAYA_SCRIPT_PATH%
set XBMLANGPATH=%CB_MAYA_SHARED%\prefs\icons;%XBMLANGPATH%
set MAYA_PRESET_PATH=%CB_MAYA_SHARED%\prefs\attrPresets;%MAYA_PRESET_PATH%
set PYTHONPATH=%CB_MAYA_SHARED%\scripts;%PYTHONPATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules;%MAYA_MODULE_PATH%

:: Additional modules
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\mGear_2016;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\SOuP;%MAYA_MODULE_PATH%
set MAYA_SHELF_PATH=%CB_MAYA_SHARED%\modules\SOuP\shelves;%MAYA_SHELF_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\pdipro35c_Maya2016x64;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\ovdb\maya\maya2016;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\cvshapeinverter;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Toolchefs;%MAYA_MODULE_PATH%
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Exocortex;%MAYA_MODULE_PATH%

:: Miarmy
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy;%MAYA_MODULE_PATH%
set PATH=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin;%PATH%
set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Basefount\Miarmy\bin\vray\vray_3.1_3.3_3.4\Maya2015and2016;%VRAY_PLUGINS_x64%;

:: Yeti
set MAYA_MODULE_PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64;%MAYA_MODULE_PATH%
set PATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%PATH%;
set VRAY_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_PLUGINS_x64%;
set VRAY_FOR_MAYA2016_PLUGINS_x64=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\bin;%VRAY_FOR_MAYA2016_PLUGINS_x64%;
set REDSHIFT_MAYAEXTENSIONSPATH=%CB_MAYA_SHARED%\modules\Yeti-v2.1.5_Maya2016-windows64\plug-ins;%REDSHIFT_MAYAEXTENSIONSPATH%
set peregrinel_LICENSE=5053@CBserver

:: maya-capture
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\maya-capture-gui-cb;%PYTHONPATH%

:: maya-matrix-deform
set PYTHONPATH=%CB_PIPELINE%\git\maya-matrix-deformers;%PYTHONPATH%
set MAYA_PLUG_IN_PATH=%CB_PIPELINE%\git\maya-matrix-deformers\plugin;%MAYA_PLUG_IN_PATH%

:: rapid-rig
set XBMLANGPATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%XBMLANGPATH%
set MAYA_SCRIPT_PATH=%CB_MAYA_SHARED%\scripts\RapidRig_Modular_V02;%MAYA_SCRIPT_PATH%


:: Fix Maya Playblast Color Management depth
set MAYA_FLOATING_POINT_RT_PLAYBLAST=1


:: Fix V-ray forcing affinity to 100%
set VRAY_USE_THREAD_AFFINITY=0
|
||||
30
python_environment.bat
Normal file
30
python_environment.bat
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
@echo OFF
echo Entering Python environment...

set CB_PYTHON_VERSION=2.7

:: Fix: the "embedded python" echo below must caret-escape its parentheses;
:: an unescaped ")" inside a parenthesized block terminates the block early
:: and broke parsing of the else-branch.
where /Q python.exe
if ERRORLEVEL 1 (
    if EXIST C:\Python27\python.exe (
        echo Adding C:\Python27 to PATH
        set "PATH=%PATH%;C:\Python27"
        goto:has-python
    ) else (
        echo Adding embedded python ^(pipeline^)
        set "PATH=%PATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\bin"
        goto:has-python
    )
)
:has-python

:: Python universal (non-compiled)
set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\universal\site-packages

:: Python version/windows-specific
:: set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\win\%CB_PYTHON_VERSION%

:: Python standalone (compiled to version)
if NOT "%CB_PYTHON_STANDALONE%" == "0" (
    echo Entering Python Standalone environment...
    set PYTHONPATH=%PYTHONPATH%;%CB_APP_SHARED%\python\standalone\%CB_PYTHON_VERSION%\site-packages
)
|
||||
33
set_environment.bat
Normal file
33
set_environment.bat
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
@echo off
echo Entering pipeline (raw development) environment...

:: Initialize environment
:: Hard-coded root of the development pipeline checkout.
set CB_PIPELINE=P:\pipeline\dev

set CB_APP_SHARED=%CB_PIPELINE%\apps

:: NOTE(review): CB_APP_SHARED is assigned just above from a hard-coded
:: path, so this guard can never trigger; confirm whether it was meant to
:: validate CB_PIPELINE coming from the calling environment instead.
if "%CB_APP_SHARED%" == "" (
    echo Error: "CB_APP_SHARED" not set
    goto :eof
)

echo setting STORAGE..
set STORAGE=P:

:: %~dp0 expands to this script's own directory (with trailing backslash).
set LAUNCHER_ROOT=%~dp0/launchers

:: Core
echo Add cb core..
set PYTHONPATH=%CB_PIPELINE%\git\cb;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\cbra;%PYTHONPATH%

:: Extra
set PYTHONPATH=%CB_PIPELINE%\git\pyseq;%PYTHONPATH%
set PYTHONPATH=%CB_PIPELINE%\git\Qt.py;%PYTHONPATH%


:: Ftrack-connect
::set PYTHONPATH=%CB_PIPELINE%\git\ftrack-connect\source;%PYTHONPATH%

:: FFMPEG
set FFMPEG_PATH=%CB_APP_SHARED%\ffmpeg\bin\ffmpeg.exe
|
||||
Loading…
Add table
Add a link
Reference in a new issue