replace avalon imports in modules

This commit is contained in:
Jakub Trllo 2022-04-14 12:35:27 +02:00
parent 2f02e03995
commit e75170d5c6
13 changed files with 63 additions and 55 deletions

View file

@@ -3,10 +3,9 @@ import attr
import getpass
import pyblish.api
from avalon import api
from openpype.lib import env_value_to_bool
from openpype.lib.delivery import collect_frames
from openpype.pipeline import legacy_io
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
@@ -89,7 +88,7 @@ class AfterEffectsSubmitDeadline(
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
if key in os.environ}, **legacy_io.Session)
for key in keys:
val = environment.get(key)
if val:

View file

@@ -8,8 +8,8 @@ import re
import attr
import pyblish.api
from avalon import api
from openpype.pipeline import legacy_io
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
@@ -282,7 +282,7 @@ class HarmonySubmitDeadline(
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
if key in os.environ}, **legacy_io.Session)
for key in keys:
val = environment.get(key)
if val:

View file

@@ -4,10 +4,10 @@ import json
import requests
import hou
from avalon import api, io
import pyblish.api
from openpype.pipeline import legacy_io
class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
"""Submit Houdini scene to perform a local publish in Deadline.
@@ -35,7 +35,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
), "Errors found, aborting integration.."
# Deadline connection
AVALON_DEADLINE = api.Session.get(
AVALON_DEADLINE = legacy_io.Session.get(
"AVALON_DEADLINE", "http://localhost:8082"
)
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
@@ -55,7 +55,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
scenename = os.path.basename(scene)
# Get project code
project = io.find_one({"type": "project"})
project = legacy_io.find_one({"type": "project"})
code = project["data"].get("code", project["name"])
job_name = "{scene} [PUBLISH]".format(scene=scenename)
@@ -137,7 +137,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
environment = dict(
{key: os.environ[key] for key in keys if key in os.environ},
**api.Session
**legacy_io.Session
)
environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instances)

View file

@@ -3,12 +3,12 @@ import json
import getpass
import requests
from avalon import api
import pyblish.api
import hou
from openpype.pipeline import legacy_io
class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
"""Submit Solaris USD Render ROPs to Deadline.
@@ -106,7 +106,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
if key in os.environ}, **legacy_io.Session)
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
@@ -140,7 +140,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
def submit(self, instance, payload):
AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
AVALON_DEADLINE = legacy_io.Session.get("AVALON_DEADLINE",
"http://localhost:8082")
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"

View file

@@ -32,10 +32,10 @@ import requests
from maya import cmds
from avalon import api
import pyblish.api
from openpype.hosts.maya.api import lib
from openpype.pipeline import legacy_io
# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
@@ -488,7 +488,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
keys.append("OPENPYPE_MONGO")
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
if key in os.environ}, **legacy_io.Session)
environment["OPENPYPE_LOG_NO_COLORS"] = "1"
environment["OPENPYPE_MAYA_VERSION"] = cmds.about(v=True)
# to recognize job from PYPE for turning Event On/Off

View file

@@ -4,10 +4,10 @@ import json
import getpass
import requests
from avalon import api
import pyblish.api
import nuke
from openpype.pipeline import legacy_io
class NukeSubmitDeadline(pyblish.api.InstancePlugin):
@@ -266,7 +266,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
keys += self.env_allowed_keys
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)
if key in os.environ}, **legacy_io.Session)
for _path in os.environ:
if _path.lower().startswith('openpype_'):

View file

@@ -7,13 +7,14 @@ import re
from copy import copy, deepcopy
import requests
import clique
import openpype.api
from avalon import api, io
import pyblish.api
from openpype.pipeline import get_representation_path
import openpype.api
from openpype.pipeline import (
get_representation_path,
legacy_io,
)
def get_resources(version, extension=None):
@@ -22,7 +23,7 @@ def get_resources(version, extension=None):
if extension:
query["name"] = extension
representation = io.find_one(query)
representation = legacy_io.find_one(query)
assert representation, "This is a bug"
directory = get_representation_path(representation)
@@ -221,9 +222,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
self._create_metadata_path(instance)
environment = job["Props"].get("Env", {})
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["AVALON_ASSET"] = io.Session["AVALON_ASSET"]
environment["AVALON_TASK"] = io.Session["AVALON_TASK"]
environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"]
environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"]
environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"]
environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
environment["OPENPYPE_LOG_NO_COLORS"] = "1"
environment["OPENPYPE_USERNAME"] = instance.context.data["user"]
@@ -663,7 +664,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if hasattr(instance, "_log"):
data['_log'] = instance._log
asset = data.get("asset") or api.Session["AVALON_ASSET"]
asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"]
subset = data.get("subset")
start = instance.data.get("frameStart")
@@ -955,7 +956,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"intent": context.data.get("intent"),
"comment": context.data.get("comment"),
"job": render_job or None,
"session": api.Session.copy(),
"session": legacy_io.Session.copy(),
"instances": instances
}
@@ -1063,7 +1064,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
else:
# solve deprecated situation when `folder` key is not underneath
# `publish` anatomy
project_name = api.Session["AVALON_PROJECT"]
project_name = legacy_io.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have set `folder`"
" key underneath `publish` (in global of for project `{}`)."

View file

@@ -4,8 +4,11 @@ import traceback
import json
import ftrack_api
from avalon import io, api
from openpype.pipeline import get_representation_path
from openpype.pipeline import (
get_representation_path,
legacy_io,
)
from openpype_modules.ftrack.lib import BaseAction, statics_icon
@@ -253,8 +256,8 @@ class RVAction(BaseAction):
)["version"]["asset"]["parent"]["link"][0]
project = session.get(link["type"], link["id"])
os.environ["AVALON_PROJECT"] = project["name"]
api.Session["AVALON_PROJECT"] = project["name"]
io.install()
legacy_io.Session["AVALON_PROJECT"] = project["name"]
legacy_io.install()
location = ftrack_api.Session().pick_location()
@@ -278,22 +281,22 @@
if online_source:
continue
asset = io.find_one({"type": "asset", "name": parent_name})
subset = io.find_one(
asset = legacy_io.find_one({"type": "asset", "name": parent_name})
subset = legacy_io.find_one(
{
"type": "subset",
"name": component["version"]["asset"]["name"],
"parent": asset["_id"]
}
)
version = io.find_one(
version = legacy_io.find_one(
{
"type": "version",
"name": component["version"]["version"],
"parent": subset["_id"]
}
)
representation = io.find_one(
representation = legacy_io.find_one(
{
"type": "representation",
"parent": version["_id"],
@@ -301,7 +304,7 @@
}
)
if representation is None:
representation = io.find_one(
representation = legacy_io.find_one(
{
"type": "representation",
"parent": version["_id"],

View file

@@ -1,6 +1,7 @@
import logging
import pyblish.api
import avalon.api
from openpype.pipeline import legacy_io
class CollectFtrackApi(pyblish.api.ContextPlugin):
@@ -23,9 +24,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
# Collect task
project_name = avalon.api.Session["AVALON_PROJECT"]
asset_name = avalon.api.Session["AVALON_ASSET"]
task_name = avalon.api.Session["AVALON_TASK"]
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
# Find project entity
project_query = 'Project where full_name is "{0}"'.format(project_name)

View file

@@ -6,8 +6,8 @@ Provides:
instance -> families ([])
"""
import pyblish.api
import avalon.api
from openpype.pipeline import legacy_io
from openpype.lib.plugin_tools import filter_profiles
@@ -35,8 +35,8 @@ class CollectFtrackFamily(pyblish.api.InstancePlugin):
return
task_name = instance.data.get("task",
avalon.api.Session["AVALON_TASK"])
host_name = avalon.api.Session["AVALON_APP"]
legacy_io.Session["AVALON_TASK"])
host_name = legacy_io.Session["AVALON_APP"]
family = instance.data["family"]
filtering_criteria = {

View file

@@ -2,7 +2,8 @@ import sys
import collections
import six
import pyblish.api
from avalon import io
from openpype.pipeline import legacy_io
# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC`
CUST_ATTR_AUTO_SYNC = "avalon_auto_sync"
@@ -80,8 +81,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
auto_sync_state = project[
"custom_attributes"][CUST_ATTR_AUTO_SYNC]
if not io.Session:
io.install()
if not legacy_io.Session:
legacy_io.install()
self.ft_project = None
@@ -271,7 +272,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# Create new links.
for input in entity_data.get("inputs", []):
input_id = io.find_one({"_id": input})["data"]["ftrackId"]
input_id = legacy_io.find_one({"_id": input})["data"]["ftrackId"]
assetbuild = self.session.get("AssetBuild", input_id)
self.log.debug(
"Creating link from {0} to {1}".format(

View file

@@ -7,7 +7,8 @@ import json
from pprint import pformat
import pyblish.api
from avalon import api
from openpype.pipeline import legacy_io
def collect(root,
@@ -127,7 +128,7 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin):
session = metadata.get("session")
if session:
self.log.info("setting session using metadata")
api.Session.update(session)
legacy_io.Session.update(session)
os.environ.update(session)
else:
@@ -187,7 +188,9 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin):
"family": families[0], # backwards compatibility / pyblish
"families": list(families),
"subset": subset,
"asset": data.get("asset", api.Session["AVALON_ASSET"]),
"asset": data.get(
"asset", legacy_io.Session["AVALON_ASSET"]
),
"stagingDir": root,
"frameStart": start,
"frameEnd": end,

View file

@@ -1,7 +1,7 @@
from avalon import io
import pyblish.api
from openpype.lib.profiles_filtering import filter_profiles
from openpype.pipeline import legacy_io
class CollectSlackFamilies(pyblish.api.InstancePlugin):
@@ -18,7 +18,7 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
profiles = None
def process(self, instance):
task_name = io.Session.get("AVALON_TASK")
task_name = legacy_io.Session.get("AVALON_TASK")
family = self.main_family_from_instance(instance)
key_values = {
"families": family,