Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00.
Merge pull request #3774 from pypeclub/bugfix/user_username_keys
General: User keys during publishing
This commit is contained in:
commit
a25c6e5cd7
6 changed files with 29 additions and 25 deletions
|
|
@ -1,5 +1,8 @@
|
|||
"""Loads publishing context from json and continues in publish process.
|
||||
|
||||
Should run before 'CollectAnatomyContextData' so the user on context is
|
||||
changed before it's stored to context anatomy data or instance anatomy data.
|
||||
|
||||
Requires:
|
||||
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
|
||||
|
||||
|
|
@ -13,7 +16,7 @@ import os
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectUsername(pyblish.api.ContextPlugin):
|
||||
class CollectUsernameForWebpublish(pyblish.api.ContextPlugin):
|
||||
"""
|
||||
Translates user email to Ftrack username.
|
||||
|
||||
|
|
@ -32,10 +35,8 @@ class CollectUsername(pyblish.api.ContextPlugin):
|
|||
hosts = ["webpublisher", "photoshop"]
|
||||
targets = ["remotepublish", "filespublish", "tvpaint_worker"]
|
||||
|
||||
_context = None
|
||||
|
||||
def process(self, context):
|
||||
self.log.info("CollectUsername")
|
||||
self.log.info("{}".format(self.__class__.__name__))
|
||||
os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"]
|
||||
os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"]
|
||||
|
||||
|
|
@ -54,12 +55,14 @@ class CollectUsername(pyblish.api.ContextPlugin):
|
|||
return
|
||||
|
||||
session = ftrack_api.Session(auto_connect_event_hub=False)
|
||||
user = session.query("User where email like '{}'".format(user_email))
|
||||
user = session.query(
|
||||
"User where email like '{}'".format(user_email)
|
||||
).first()
|
||||
|
||||
if not user:
|
||||
raise ValueError(
|
||||
"Couldn't find user with {} email".format(user_email))
|
||||
user = user[0]
|
||||
|
||||
username = user.get("username")
|
||||
self.log.debug("Resolved ftrack username:: {}".format(username))
|
||||
os.environ["FTRACK_API_USER"] = username
|
||||
|
|
@ -67,5 +70,4 @@ class CollectUsername(pyblish.api.ContextPlugin):
|
|||
burnin_name = username
|
||||
if '@' in burnin_name:
|
||||
burnin_name = burnin_name[:burnin_name.index('@')]
|
||||
os.environ["WEBPUBLISH_OPENPYPE_USERNAME"] = burnin_name
|
||||
context.data["user"] = burnin_name
|
||||
|
|
|
|||
|
|
@ -95,13 +95,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
Reviews might be large, so allow only adding link to message instead of
|
||||
uploading only.
|
||||
"""
|
||||
|
||||
fill_data = copy.deepcopy(instance.context.data["anatomyData"])
|
||||
|
||||
username = fill_data.get("user")
|
||||
fill_pairs = [
|
||||
("asset", instance.data.get("asset", fill_data.get("asset"))),
|
||||
("subset", instance.data.get("subset", fill_data.get("subset"))),
|
||||
("username", instance.data.get("username",
|
||||
fill_data.get("username"))),
|
||||
("user", username),
|
||||
("username", username),
|
||||
("app", instance.data.get("app", fill_data.get("app"))),
|
||||
("family", instance.data.get("family", fill_data.get("family"))),
|
||||
("version", str(instance.data.get("version",
|
||||
|
|
@ -110,13 +112,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
|
|||
if review_path:
|
||||
fill_pairs.append(("review_filepath", review_path))
|
||||
|
||||
task_data = instance.data.get("task")
|
||||
if not task_data:
|
||||
task_data = fill_data.get("task")
|
||||
for key, value in task_data.items():
|
||||
fill_key = "task[{}]".format(key)
|
||||
fill_pairs.append((fill_key, value))
|
||||
fill_pairs.append(("task", task_data["name"]))
|
||||
task_data = fill_data.get("task")
|
||||
if task_data:
|
||||
if (
|
||||
"{task}" in message_templ
|
||||
or "{Task}" in message_templ
|
||||
or "{TASK}" in message_templ
|
||||
):
|
||||
fill_pairs.append(("task", task_data["name"]))
|
||||
|
||||
else:
|
||||
for key, value in task_data.items():
|
||||
fill_key = "task[{}]".format(key)
|
||||
fill_pairs.append((fill_key, value))
|
||||
|
||||
self.log.debug("fill_pairs ::{}".format(fill_pairs))
|
||||
multiple_case_variants = prepare_template_data(fill_pairs)
|
||||
|
|
|
|||
|
|
@ -488,12 +488,6 @@ class ExtractBurnin(publish.Extractor):
|
|||
"frame_end_handle": frame_end_handle
|
||||
}
|
||||
|
||||
# use explicit username for webpublishes as rewriting
|
||||
# OPENPYPE_USERNAME might have side effects
|
||||
webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME")
|
||||
if webpublish_user_name:
|
||||
burnin_data["username"] = webpublish_user_name
|
||||
|
||||
self.log.debug(
|
||||
"Basic burnin_data: {}".format(json.dumps(burnin_data, indent=4))
|
||||
)
|
||||
|
|
|
|||
|
|
@ -135,7 +135,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
# the database even if not used by the destination template
|
||||
db_representation_context_keys = [
|
||||
"project", "asset", "task", "subset", "version", "representation",
|
||||
"family", "hierarchy", "username", "output"
|
||||
"family", "hierarchy", "username", "user", "output"
|
||||
]
|
||||
skip_host_families = []
|
||||
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
|
|||
ignored_representation_names = []
|
||||
db_representation_context_keys = [
|
||||
"project", "asset", "task", "subset", "representation",
|
||||
"family", "hierarchy", "task", "username"
|
||||
"family", "hierarchy", "task", "username", "user"
|
||||
]
|
||||
# QUESTION/TODO this process should happen on server if crashed due to
|
||||
# permissions error on files (files were used or user didn't have perms)
|
||||
|
|
|
|||
|
|
@ -127,7 +127,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
exclude_families = ["render.farm"]
|
||||
db_representation_context_keys = [
|
||||
"project", "asset", "task", "subset", "version", "representation",
|
||||
"family", "hierarchy", "task", "username"
|
||||
"family", "hierarchy", "task", "username", "user"
|
||||
]
|
||||
default_template_name = "publish"
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue