Merge branch 'develop' into feature/OP-4859_cant-assign-shaders-to-the-ass-file

This commit is contained in:
Toke Stuart Jepsen 2023-02-16 11:11:53 +00:00
commit 455cb65543
21 changed files with 380 additions and 184 deletions

View file

@ -6,8 +6,7 @@ from openpype.hosts.aftereffects import api
from openpype.pipeline import (
Creator,
CreatedInstance,
CreatorError,
legacy_io,
CreatorError
)
from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
from openpype.lib import prepare_template_data
@ -127,7 +126,7 @@ class RenderCreator(Creator):
subset_change = _changes.get("subset")
if subset_change:
api.get_stub().rename_item(created_inst.data["members"][0],
subset_change[1])
subset_change.new_value)
def remove_instances(self, instances):
for instance in instances:
@ -195,7 +194,7 @@ class RenderCreator(Creator):
instance_data.pop("uuid")
if not instance_data.get("task"):
instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
instance_data["task"] = self.create_context.get_current_task_name()
if not instance_data.get("creator_attributes"):
is_old_farm = instance_data["family"] != "renderLocal"

View file

@ -2,8 +2,7 @@ import openpype.hosts.aftereffects.api as api
from openpype.client import get_asset_by_name
from openpype.pipeline import (
AutoCreator,
CreatedInstance,
legacy_io,
CreatedInstance
)
from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
@ -38,10 +37,11 @@ class AEWorkfileCreator(AutoCreator):
existing_instance = instance
break
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
host_name = legacy_io.Session["AVALON_APP"]
context = self.create_context
project_name = context.get_current_project_name()
asset_name = context.get_current_asset_name()
task_name = context.get_current_task_name()
host_name = context.host_name
if existing_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)

View file

@ -143,6 +143,9 @@ class ExtractSubsetResources(publish.Extractor):
# create staging dir path
staging_dir = self.staging_dir(instance)
# append staging dir for later cleanup
instance.context.data["cleanupFullPaths"].append(staging_dir)
# add default preset type for thumbnail and reviewable video
# update them with settings and override in case the same
# are found in there
@ -548,30 +551,3 @@ class ExtractSubsetResources(publish.Extractor):
"Path `{}` is containing more that one clip".format(path)
)
return clips[0]
def staging_dir(self, instance):
"""Provide a temporary directory in which to store extracted files
Upon calling this method the staging directory is stored inside
the instance.data['stagingDir']
"""
staging_dir = instance.data.get('stagingDir', None)
openpype_temp_dir = os.getenv("OPENPYPE_TEMP_DIR")
if not staging_dir:
if openpype_temp_dir and os.path.exists(openpype_temp_dir):
staging_dir = os.path.normpath(
tempfile.mkdtemp(
prefix="pyblish_tmp_",
dir=openpype_temp_dir
)
)
else:
staging_dir = os.path.normpath(
tempfile.mkdtemp(prefix="pyblish_tmp_")
)
instance.data['stagingDir'] = staging_dir
instance.context.data["cleanupFullPaths"].append(staging_dir)
return staging_dir

View file

@ -65,20 +65,20 @@ class ArnoldStandinLoader(load.LoaderPlugin):
# Create transform with shape
transform_name = label + "_standin"
standinShape = mtoa.ui.arnoldmenu.createStandIn()
standin = cmds.listRelatives(standinShape, parent=True)[0]
standin_shape = mtoa.ui.arnoldmenu.createStandIn()
standin = cmds.listRelatives(standin_shape, parent=True)[0]
standin = cmds.rename(standin, transform_name)
standinShape = cmds.listRelatives(standin, shapes=True)[0]
standin_shape = cmds.listRelatives(standin, shapes=True)[0]
cmds.parent(standin, root)
# Set the standin filepath
path, operator = self._setup_proxy(
standinShape, self.fname, namespace
standin_shape, self.fname, namespace
)
cmds.setAttr(standinShape + ".dso", path, type="string")
cmds.setAttr(standin_shape + ".dso", path, type="string")
sequence = is_sequence(os.listdir(os.path.dirname(self.fname)))
cmds.setAttr(standinShape + ".useFrameExtension", sequence)
cmds.setAttr(standin_shape + ".useFrameExtension", sequence)
nodes = [root, standin]
if operator is not None:

View file

@ -42,7 +42,6 @@ Provides:
import re
import os
import platform
import json
from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup
@ -320,7 +319,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
"renderSetupIncludeLights"
),
"strict_error_checking": render_instance.data.get(
"strict_error_checking")
"strict_error_checking", True
)
}
# Collect Deadline url if Deadline module is enabled

View file

@ -193,7 +193,7 @@ class ImageCreator(Creator):
instance_data.pop("uuid")
if not instance_data.get("task"):
instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
instance_data["task"] = self.create_context.get_current_task_name()
if not instance_data.get("variant"):
instance_data["variant"] = ''

View file

@ -2,8 +2,7 @@ import openpype.hosts.photoshop.api as api
from openpype.client import get_asset_by_name
from openpype.pipeline import (
AutoCreator,
CreatedInstance,
legacy_io
CreatedInstance
)
from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances
@ -38,10 +37,11 @@ class PSWorkfileCreator(AutoCreator):
existing_instance = instance
break
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
host_name = legacy_io.Session["AVALON_APP"]
context = self.create_context
project_name = context.get_current_project_name()
asset_name = context.get_current_asset_name()
task_name = context.get_current_task_name()
host_name = context.host_name
if existing_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
subset_name = self.get_subset_name(

View file

@ -8,7 +8,7 @@ import inspect
from uuid import uuid4
from contextlib import contextmanager
from openpype.client import get_assets
from openpype.client import get_assets, get_asset_by_name
from openpype.settings import (
get_system_settings,
get_project_settings
@ -17,13 +17,10 @@ from openpype.lib.attribute_definitions import (
UnknownDef,
serialize_attr_defs,
deserialize_attr_defs,
get_default_values,
)
from openpype.host import IPublishHost
from openpype.pipeline import legacy_io
from openpype.pipeline.mongodb import (
AvalonMongoDB,
session_data_from_environment,
)
from .creator_plugins import (
Creator,
@ -1338,8 +1335,6 @@ class CreateContext:
Args:
host(ModuleType): Host implementation which handles implementation and
global metadata.
dbcon(AvalonMongoDB): Connection to mongo with context (at least
project).
headless(bool): Context is created out of UI (Current not used).
reset(bool): Reset context on initialization.
discover_publish_plugins(bool): Discover publish plugins during reset
@ -1347,16 +1342,8 @@ class CreateContext:
"""
def __init__(
self, host, dbcon=None, headless=False, reset=True,
discover_publish_plugins=True
self, host, headless=False, reset=True, discover_publish_plugins=True
):
# Create conncetion if is not passed
if dbcon is None:
session = session_data_from_environment(True)
dbcon = AvalonMongoDB(session)
dbcon.install()
self.dbcon = dbcon
self.host = host
# Prepare attribute for logger (Created on demand in `log` property)
@ -1380,6 +1367,10 @@ class CreateContext:
" Missing methods: {}"
).format(joined_methods))
self._current_project_name = None
self._current_asset_name = None
self._current_task_name = None
self._host_is_valid = host_is_valid
# Currently unused variable
self.headless = headless
@ -1499,11 +1490,20 @@ class CreateContext:
@property
def host_name(self):
if hasattr(self.host, "name"):
return self.host.name
return os.environ["AVALON_APP"]
@property
def project_name(self):
return self.dbcon.active_project()
def get_current_project_name(self):
return self._current_project_name
def get_current_asset_name(self):
return self._current_asset_name
def get_current_task_name(self):
return self._current_task_name
project_name = property(get_current_project_name)
@property
def log(self):
@ -1520,7 +1520,7 @@ class CreateContext:
self.reset_preparation()
self.reset_avalon_context()
self.reset_current_context()
self.reset_plugins(discover_publish_plugins)
self.reset_context_data()
@ -1567,14 +1567,22 @@ class CreateContext:
self._collection_shared_data = None
self.refresh_thumbnails()
def reset_avalon_context(self):
"""Give ability to reset avalon context.
def reset_current_context(self):
"""Refresh current context.
Reset is based on optional host implementation of `get_current_context`
function or using `legacy_io.Session`.
Some hosts have ability to change context file without using workfiles
tool but that change is not propagated to
tool but that change is not propagated to 'legacy_io.Session'
nor 'os.environ'.
Todos:
UI: Current context should be also checked on save - compare
initial values vs. current values.
Related to UI checks: Current workfile can be also considered
as current context information as that's where the metadata
are stored. We should store the workfile (if is available) too.
"""
project_name = asset_name = task_name = None
@ -1592,12 +1600,9 @@ class CreateContext:
if not task_name:
task_name = legacy_io.Session.get("AVALON_TASK")
if project_name:
self.dbcon.Session["AVALON_PROJECT"] = project_name
if asset_name:
self.dbcon.Session["AVALON_ASSET"] = asset_name
if task_name:
self.dbcon.Session["AVALON_TASK"] = task_name
self._current_project_name = project_name
self._current_asset_name = asset_name
self._current_task_name = task_name
def reset_plugins(self, discover_publish_plugins=True):
"""Reload plugins.
@ -1792,40 +1797,128 @@ class CreateContext:
with self.bulk_instances_collection():
self._bulk_instances_to_process.append(instance)
def create(self, identifier, *args, **kwargs):
"""Wrapper for creators to trigger created.
def _get_creator_in_create(self, identifier):
"""Creator by identifier with unified error.
Different types of creators may expect different arguments thus the
hints for args are blind.
Helper method to get creator by identifier with same error when creator
is not available.
Args:
identifier (str): Creator's identifier.
*args (Tuple[Any]): Arguments for create method.
**kwargs (Dict[Any, Any]): Keyword argument for create method.
identifier (str): Identifier of creator plugin.
Returns:
BaseCreator: Creator found by identifier.
Raises:
CreatorError: When identifier is not known.
"""
error_message = "Failed to run Creator with identifier \"{}\". {}"
creator = self.creators.get(identifier)
label = getattr(creator, "label", None)
failed = False
add_traceback = False
exc_info = None
try:
# Fake CreatorError (Could be maybe specific exception?)
if creator is None:
# Fake CreatorError (Could be maybe specific exception?)
if creator is None:
raise CreatorError(
"Creator {} was not found".format(identifier)
)
return creator
def create(
self,
creator_identifier,
variant,
asset_doc=None,
task_name=None,
pre_create_data=None
):
"""Trigger create of plugins with standardized arguments.
Arguments 'asset_doc' and 'task_name' use current context as default
values. If only 'task_name' is provided it will be overridden by
task name from current context. If 'task_name' is not provided
when 'asset_doc' is, it is considered that task name is not specified,
which can lead to error if subset name template requires task name.
Args:
creator_identifier (str): Identifier of creator plugin.
variant (str): Variant used for subset name.
asset_doc (Dict[str, Any]): Asset document which define context of
creation (possible context of created instance/s).
task_name (str): Name of task to which is context related.
pre_create_data (Dict[str, Any]): Pre-create attribute values.
Returns:
Any: Output of triggered creator's 'create' method.
Raises:
CreatorError: If creator was not found or asset is empty.
"""
creator = self._get_creator_in_create(creator_identifier)
project_name = self.project_name
if asset_doc is None:
asset_name = self.get_current_asset_name()
asset_doc = get_asset_by_name(project_name, asset_name)
task_name = self.get_current_task_name()
if asset_doc is None:
raise CreatorError(
"Creator {} was not found".format(identifier)
"Asset with name {} was not found".format(asset_name)
)
creator.create(*args, **kwargs)
if pre_create_data is None:
pre_create_data = {}
precreate_attr_defs = creator.get_pre_create_attr_defs() or []
# Create default values of precreate data
_pre_create_data = get_default_values(precreate_attr_defs)
# Update passed precreate data to default values
# TODO validate types
_pre_create_data.update(pre_create_data)
subset_name = creator.get_subset_name(
variant,
task_name,
asset_doc,
project_name,
self.host_name
)
instance_data = {
"asset": asset_doc["name"],
"task": task_name,
"family": creator.family,
"variant": variant
}
return creator.create(
subset_name,
instance_data,
_pre_create_data
)
def _create_with_unified_error(
self, identifier, creator, *args, **kwargs
):
error_message = "Failed to run Creator with identifier \"{}\". {}"
label = None
add_traceback = False
result = None
fail_info = None
success = False
try:
# Try to get creator and his label
if creator is None:
creator = self._get_creator_in_create(identifier)
label = getattr(creator, "label", label)
# Run create
result = creator.create(*args, **kwargs)
success = True
except CreatorError:
failed = True
exc_info = sys.exc_info()
self.log.warning(error_message.format(identifier, exc_info[1]))
except:
failed = True
add_traceback = True
exc_info = sys.exc_info()
self.log.warning(
@ -1833,12 +1926,35 @@ class CreateContext:
exc_info=True
)
if failed:
raise CreatorsCreateFailed([
prepare_failed_creator_operation_info(
identifier, label, exc_info, add_traceback
)
])
if not success:
fail_info = prepare_failed_creator_operation_info(
identifier, label, exc_info, add_traceback
)
return result, fail_info
def create_with_unified_error(self, identifier, *args, **kwargs):
"""Trigger create but raise only one error if anything fails.
Added to raise unified exception. Capture any possible issues and
reraise it with unified information.
Args:
identifier (str): Identifier of creator.
*args (Tuple[Any]): Arguments for create method.
**kwargs (Dict[Any, Any]): Keyword argument for create method.
Raises:
CreatorsCreateFailed: When creation fails due to any possible
reason. If anything goes wrong this is only possible exception
the method should raise.
"""
result, fail_info = self._create_with_unified_error(
identifier, None, *args, **kwargs
)
if fail_info is not None:
raise CreatorsCreateFailed([fail_info])
return result
def _remove_instance(self, instance):
self._instances_by_id.pop(instance.id, None)
@ -1968,38 +2084,12 @@ class CreateContext:
Reset instances if any autocreator executed properly.
"""
error_message = "Failed to run AutoCreator with identifier \"{}\". {}"
failed_info = []
for creator in self.sorted_autocreators:
identifier = creator.identifier
label = creator.label
failed = False
add_traceback = False
try:
creator.create()
except CreatorError:
failed = True
exc_info = sys.exc_info()
self.log.warning(error_message.format(identifier, exc_info[1]))
# Use bare except because some hosts raise their exceptions that
# do not inherit from python's `BaseException`
except:
failed = True
add_traceback = True
exc_info = sys.exc_info()
self.log.warning(
error_message.format(identifier, ""),
exc_info=True
)
if failed:
failed_info.append(
prepare_failed_creator_operation_info(
identifier, label, exc_info, add_traceback
)
)
_, fail_info = self._create_with_unified_error(identifier, creator)
if fail_info is not None:
failed_info.append(fail_info)
if failed_info:
raise CreatorsCreateFailed(failed_info)

View file

@ -28,7 +28,6 @@ from openpype.lib import (
TemplateUnsolved,
)
from openpype.pipeline import (
schema,
legacy_io,
Anatomy,
)
@ -643,7 +642,10 @@ def get_representation_path(representation, root=None, dbcon=None):
def path_from_config():
try:
version_, subset, asset, project = dbcon.parenthood(representation)
project_name = dbcon.active_project()
version_, subset, asset, project = get_representation_parents(
project_name, representation
)
except ValueError:
log.debug(
"Representation %s wasn't found in database, "

View file

@ -10,11 +10,17 @@ import six
import pyblish.plugin
import pyblish.api
from openpype.lib import Logger, filter_profiles
from openpype.lib import (
Logger,
filter_profiles
)
from openpype.settings import (
get_project_settings,
get_system_settings,
)
from openpype.pipeline import (
tempdir
)
from .contants import (
DEFAULT_PUBLISH_TEMPLATE,
@ -595,7 +601,7 @@ def context_plugin_should_run(plugin, context):
Args:
plugin (pyblish.api.Plugin): Plugin with filters.
context (pyblish.api.Context): Pyblish context with insances.
context (pyblish.api.Context): Pyblish context with instances.
Returns:
bool: Context plugin should run based on valid instances.
@ -609,12 +615,21 @@ def context_plugin_should_run(plugin, context):
def get_instance_staging_dir(instance):
"""Unified way how staging dir is stored and created on instances.
First check if 'stagingDir' is already set in instance data. If there is
not create new in tempdir.
First check if 'stagingDir' is already set in instance data.
In case there already is new tempdir will not be created.
It also supports `OPENPYPE_TMPDIR`, so studio can define own temp
shared repository per project or even per more granular context.
Template formatting is supported also with optional keys. Folder is
created in case it doesn't exist.
Available anatomy formatting keys:
- root[work | <root name key>]
- project[name | code]
Note:
Staging dir does not have to be necessarily in tempdir so be carefull
about it's usage.
Staging dir does not have to be necessarily in tempdir so be careful
about its usage.
Args:
instance (pyblish.lib.Instance): Instance for which we want to get
@ -623,12 +638,27 @@ def get_instance_staging_dir(instance):
Returns:
str: Path to staging dir of instance.
"""
staging_dir = instance.data.get('stagingDir')
if staging_dir:
return staging_dir
staging_dir = instance.data.get("stagingDir")
if not staging_dir:
anatomy = instance.context.data.get("anatomy")
# get customized tempdir path from `OPENPYPE_TMPDIR` env var
custom_temp_dir = tempdir.create_custom_tempdir(
anatomy.project_name, anatomy)
if custom_temp_dir:
staging_dir = os.path.normpath(
tempfile.mkdtemp(
prefix="pyblish_tmp_",
dir=custom_temp_dir
)
)
else:
staging_dir = os.path.normpath(
tempfile.mkdtemp(prefix="pyblish_tmp_")
)
instance.data["stagingDir"] = staging_dir
instance.data['stagingDir'] = staging_dir
return staging_dir

View file

@ -0,0 +1,59 @@
"""
Temporary folder operations
"""
import os
from openpype.lib import StringTemplate
from openpype.pipeline import Anatomy
def create_custom_tempdir(project_name, anatomy=None):
""" Create custom tempdir
Template path formatting is supporting:
- optional key formatting
- available keys:
- root[work | <root name key>]
- project[name | code]
Args:
project_name (str): project name
anatomy (openpype.pipeline.Anatomy)[optional]: Anatomy object
Returns:
str | None: formatted path or None
"""
openpype_tempdir = os.getenv("OPENPYPE_TMPDIR")
if not openpype_tempdir:
return
custom_tempdir = None
if "{" in openpype_tempdir:
if anatomy is None:
anatomy = Anatomy(project_name)
# create base format data
data = {
"root": anatomy.roots,
"project": {
"name": anatomy.project_name,
"code": anatomy.project_code,
}
}
# path is anatomy template
custom_tempdir = StringTemplate.format_template(
openpype_tempdir, data).normalized()
else:
# path is absolute
custom_tempdir = openpype_tempdir
# create the dir path if it doesn't exist
if not os.path.exists(custom_tempdir):
try:
# create it if it doesn't exist
os.makedirs(custom_tempdir)
except IOError as error:
raise IOError(
"Path couldn't be created: {}".format(error)) from error
return custom_tempdir

View file

@ -32,7 +32,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
thumbnail_paths_by_instance_id.get(None)
)
project_name = create_context.project_name
project_name = create_context.get_current_project_name()
if project_name:
context.data["projectName"] = project_name
@ -53,11 +53,15 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
context.data.update(create_context.context_data_to_store())
context.data["newPublishing"] = True
# Update context data
for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"):
value = create_context.dbcon.Session.get(key)
if value is not None:
legacy_io.Session[key] = value
os.environ[key] = value
asset_name = create_context.get_current_asset_name()
task_name = create_context.get_current_task_name()
for key, value in (
("AVALON_PROJECT", project_name),
("AVALON_ASSET", asset_name),
("AVALON_TASK", task_name)
):
legacy_io.Session[key] = value
os.environ[key] = value
def create_instance(
self,

View file

@ -246,6 +246,7 @@
"sourcetype": "python",
"title": "Gizmo Note",
"command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')",
"icon": "",
"shortcut": ""
}
]

View file

@ -17,6 +17,11 @@
"key": "menu",
"label": "OpenPype Menu shortcuts",
"children": [
{
"type": "text",
"key": "create",
"label": "Create..."
},
{
"type": "text",
"key": "publish",
@ -288,4 +293,4 @@
"name": "schema_publish_gui_filter"
}
]
}
}

View file

@ -1573,20 +1573,19 @@ class PublisherController(BasePublisherController):
Handle both creation and publishing parts.
Args:
dbcon (AvalonMongoDB): Connection to mongo with context.
headless (bool): Headless publishing. ATM not implemented or used.
"""
_log = None
def __init__(self, dbcon=None, headless=False):
def __init__(self, headless=False):
super(PublisherController, self).__init__()
self._host = registered_host()
self._headless = headless
self._create_context = CreateContext(
self._host, dbcon, headless=headless, reset=False
self._host, headless=headless, reset=False
)
self._publish_plugins_proxy = None
@ -1740,7 +1739,7 @@ class PublisherController(BasePublisherController):
self._create_context.reset_preparation()
# Reset avalon context
self._create_context.reset_avalon_context()
self._create_context.reset_current_context()
self._asset_docs_cache.reset()
@ -2004,9 +2003,10 @@ class PublisherController(BasePublisherController):
success = True
try:
self._create_context.create(
self._create_context.create_with_unified_error(
creator_identifier, subset_name, instance_data, options
)
except CreatorsOperationFailed as exc:
success = False
self._emit_event(

View file

@ -566,24 +566,24 @@ class PublisherWindow(QtWidgets.QDialog):
def _go_to_publish_tab(self):
self._set_current_tab("publish")
def _go_to_details_tab(self):
self._set_current_tab("details")
def _go_to_report_tab(self):
self._set_current_tab("report")
def _go_to_details_tab(self):
self._set_current_tab("details")
def _is_on_create_tab(self):
return self._is_current_tab("create")
def _is_on_publish_tab(self):
return self._is_current_tab("publish")
def _is_on_details_tab(self):
return self._is_current_tab("details")
def _is_on_report_tab(self):
return self._is_current_tab("report")
def _is_on_details_tab(self):
return self._is_current_tab("details")
def _set_publish_overlay_visibility(self, visible):
if visible:
widget = self._publish_overlay
@ -647,16 +647,10 @@ class PublisherWindow(QtWidgets.QDialog):
# otherwise 'create' is used
# - this happens only on first show
if first_reset:
if self._overview_widget.has_items():
self._go_to_publish_tab()
else:
self._go_to_create_tab()
self._go_to_create_tab()
elif (
not self._is_on_create_tab()
and not self._is_on_publish_tab()
):
# If current tab is not 'Create' or 'Publish' go to 'Publish'
elif self._is_on_report_tab():
# Go to 'Publish' tab if is on 'Details' tab
# - this can happen when publishing started and was reset
# at that moment it doesn't make sense to stay at publish
# specific tabs.

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.1-nightly.5"
__version__ = "3.15.1-nightly.6"

View file

@ -0,0 +1,30 @@
---
id: admin_environment
title: Environment
sidebar_label: Environment
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
## OPENPYPE_TMPDIR:
- Custom staging directory
- Supports anatomy keys formatting. ex `{root[work]}/{project[name]}/temp`
- supported formatting keys:
- root[work]
- project[name | code]
## OPENPYPE_DEBUG
- setting logger to debug mode
- example value: "1" (to activate)
## OPENPYPE_LOG_LEVEL
- stringified numeric value of log level. [Here for more info](https://docs.python.org/3/library/logging.html#logging-levels)
- example value: "10"
## OPENPYPE_MONGO
- If set it takes precedence over the one set in keyring
- for more details on how to use it go [here](admin_use#check-for-mongodb-database-connection)
## OPENPYPE_USERNAME
- if set it overrides system created username

View file

@ -13,18 +13,23 @@ Settings applicable to the full studio.
![general_settings](assets/settings/settings_system_general.png)
**`Studio Name`** - Full name of the studio (can be used as variable on some places)
### Studio Name
Full name of the studio (can be used as variable on some places)
**`Studio Code`** - Studio acronym or a short code (can be used as variable on some places)
### Studio Code
Studio acronym or a short code (can be used as variable on some places)
**`Admin Password`** - After setting admin password, normal user won't have access to OpenPype settings
### Admin Password
After setting admin password, normal user won't have access to OpenPype settings
and Project Manager GUI. Please keep in mind that this is a studio wide password and it is meant purely
as a simple barrier to prevent artists from accidental setting changes.
**`Environment`** - Globally applied environment variables that will be appended to any OpenPype process in the studio.
### Environment
Globally applied environment variables that will be appended to any OpenPype process in the studio.
**`Disk mapping`** - Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up.
Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume).
### Disk mapping
- Platform dependent configuration for mapping of virtual disk(s) on an artist's OpenPype machines before OP starts up.
- Uses `subst` command, if configured volume character in `Destination` field already exists, no re-mapping is done for that character(volume).
### FFmpeg and OpenImageIO tools
We bundle FFmpeg tools for all platforms and OpenImageIO tools for Windows and Linux. By default, bundled tools are used, but it is possible to set environment variables `OPENPYPE_FFMPEG_PATHS` and `OPENPYPE_OIIO_PATHS` in system settings environments to look for them in different directory.
@ -171,4 +176,4 @@ In the image before you can see that we set most of the environment variables in
In this example MTOA will automatically fill the `MAYA_VERSION`(which is set by Maya Application environment) and `MTOA_VERSION` into the `MTOA` variable. We then use the `MTOA` to set all the other variables needed for it to function within Maya.
![tools](assets/settings/tools_01.png)
All of the tools defined in here can then be assigned to projects. You can also change the tools versions on any project level all the way down to individual asset or shot overrides. So if you just need to upgrade you render plugin for a single shot, while not risking the incompatibilities on the rest of the project, it is possible.
All the tools defined in here can then be assigned to projects. You can also change the tools versions on any project level all the way down to individual asset or shot overrides. So if you just need to upgrade you render plugin for a single shot, while not risking the incompatibilities on the rest of the project, it is possible.

View file

@ -87,6 +87,7 @@ module.exports = {
type: "category",
label: "Configuration",
items: [
"admin_environment",
"admin_settings",
"admin_settings_system",
"admin_settings_project_anatomy",

View file

@ -7180,9 +7180,9 @@ typedarray-to-buffer@^3.1.5:
is-typedarray "^1.0.0"
ua-parser-js@^0.7.30:
version "0.7.31"
resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.31.tgz#649a656b191dffab4f21d5e053e27ca17cbff5c6"
integrity sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ==
version "0.7.33"
resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532"
integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw==
unherit@^1.0.4:
version "1.1.3"