[Automated] Merged develop into main

Commit 550cac347d by ynbot, 2023-01-11 04:27:13 +01:00 (committed by GitHub).
32 changed files with 804 additions and 138 deletions


@@ -76,6 +76,18 @@ class HostBase(object):
pass
def install(self):
"""Install host specific functionality.
This is where the menu with tools should be added, callbacks registered
and other host integration initialized.
It is called automatically when 'openpype.pipeline.install_host' is
triggered.
"""
pass
@property
def log(self):
if self._log is None:


@@ -0,0 +1,152 @@
import os
import sys
from maya import cmds
import pyblish.api
import tempfile
from openpype.lib import run_subprocess
from openpype.pipeline import publish
from openpype.hosts.maya.api import lib
class ExtractImportReference(publish.Extractor):
"""
Extract the scene with imported reference.
The temporary scene with the imported reference is
published for rendering if this extractor is activated.
"""
label = "Extract Import Reference"
order = pyblish.api.ExtractorOrder - 0.48
hosts = ["maya"]
families = ["renderlayer", "workfile"]
optional = True
tmp_format = "_tmp"
@classmethod
def apply_settings(cls, project_setting, system_settings):
cls.active = project_setting["deadline"]["publish"]["MayaSubmitDeadline"]["import_reference"] # noqa
def process(self, instance):
ext_mapping = (
instance.context.data["project_settings"]["maya"]["ext_mapping"]
)
if ext_mapping:
self.log.info("Looking in settings for scene type ...")
# use extension mapping for first family found
for family in self.families:
try:
self.scene_type = ext_mapping[family]
self.log.info(
"Using {} as scene type".format(self.scene_type))
break
except KeyError:
# set scene type to ma
self.scene_type = "ma"
_scene_type = ("mayaAscii"
if self.scene_type == "ma"
else "mayaBinary")
dir_path = self.staging_dir(instance)
# name the file with the imported reference
if instance.name == "Main":
return
tmp_name = instance.name + self.tmp_format
current_name = cmds.file(query=True, sceneName=True)
ref_scene_name = "{0}.{1}".format(tmp_name, self.scene_type)
reference_path = os.path.join(dir_path, ref_scene_name)
tmp_path = os.path.dirname(current_name) + "/" + ref_scene_name
self.log.info("Performing extraction..")
# This generates script for mayapy to take care of reference
# importing outside current session. It is passing current scene
# name and destination scene name.
script = ("""
# -*- coding: utf-8 -*-
'''Script to import references to given scene.'''
import maya.standalone
maya.standalone.initialize()
from maya import cmds
# scene names filled by caller
current_name = "{current_name}"
ref_scene_name = "{ref_scene_name}"
print(">>> Opening {{}} ...".format(current_name))
cmds.file(current_name, open=True, force=True)
print(">>> Processing references")
all_reference = cmds.file(q=True, reference=True) or []
for ref in all_reference:
if cmds.referenceQuery(ref, il=True):
cmds.file(ref, importReference=True)
nested_ref = cmds.file(q=True, reference=True)
if nested_ref:
for new_ref in nested_ref:
if new_ref not in all_reference:
all_reference.append(new_ref)
print(">>> Finish importing references")
print(">>> Saving scene as {{}}".format(ref_scene_name))
cmds.file(rename=ref_scene_name)
cmds.file(save=True, force=True)
print("*** Done")
""").format(current_name=current_name, ref_scene_name=tmp_path)
mayapy_exe = os.path.join(os.getenv("MAYA_LOCATION"), "bin", "mayapy")
if sys.platform == "win32":
mayapy_exe += ".exe"
mayapy_exe = os.path.normpath(mayapy_exe)
# can't use NamedTemporaryFile as that can't be opened in another
# process until handles are closed by context manager.
with tempfile.TemporaryDirectory() as tmp_dir_name:
tmp_script_path = os.path.join(tmp_dir_name, "import_ref.py")
self.log.info("Using script file: {}".format(tmp_script_path))
with open(tmp_script_path, "wt") as tmp:
tmp.write(script)
try:
run_subprocess([mayapy_exe, tmp_script_path])
except Exception:
self.log.error("Import reference failed", exc_info=True)
raise
with lib.maintained_selection():
cmds.select(all=True, noExpand=True)
cmds.file(reference_path,
force=True,
typ=_scene_type,
exportSelected=True,
channels=True,
constraints=True,
shader=True,
expressions=True,
constructionHistory=True)
instance.context.data["currentFile"] = tmp_path
if "files" not in instance.data:
instance.data["files"] = []
instance.data["files"].append(ref_scene_name)
if instance.data.get("representations") is None:
instance.data["representations"] = []
ref_representation = {
"name": self.scene_type,
"ext": self.scene_type,
"files": ref_scene_name,
"stagingDir": os.path.dirname(current_name),
"outputName": "imported"
}
self.log.info("%s" % ref_representation)
instance.data["representations"].append(ref_representation)
self.log.info("Extracted instance '%s' to : '%s'" % (ref_scene_name,
reference_path))
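
Side note: the mayapy script above appends newly discovered references to `all_reference` while looping over it, so the list doubles as a queue and nested references get processed after their parents. A minimal pure-Python sketch of that traversal pattern, using a hypothetical reference graph in place of the `cmds.file` queries:

# Hypothetical reference graph: each scene maps to the scenes it references.
ref_graph = {
    "charA.ma": ["rigA.ma"],
    "propA.ma": [],
    "rigA.ma": [],
}

all_references = ["charA.ma", "propA.ma"]  # top-level references
# Appending while iterating keeps the loop going until no new
# nested references are discovered.
for ref in all_references:
    for nested in ref_graph.get(ref, []):
        if nested not in all_references:
            all_references.append(nested)

print(all_references)  # ['charA.ma', 'propA.ma', 'rigA.ma']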


@@ -1,7 +1,7 @@
import os
import sys
from Qt import QtWidgets, QtCore
from qtpy import QtWidgets, QtCore
from openpype.tools.utils import host_tools


@@ -2,7 +2,7 @@ import re
import uuid
import qargparse
from Qt import QtWidgets, QtCore
from qtpy import QtWidgets, QtCore
from openpype.settings import get_current_project_settings
from openpype.pipeline.context_tools import get_current_project_asset


@@ -171,7 +171,6 @@ class ShotMetadataSolver:
_index == 0
and parents[-1]["entity_name"] == parent_name
):
self.log.debug(f" skipping : {parent_name}")
continue
# in case first parent is project then start parents from start
@@ -179,7 +178,6 @@ class ShotMetadataSolver:
_index == 0
and parent_token_type == "Project"
):
self.log.debug("rebuilding parents from scratch")
project_parent = parents[0]
parents = [project_parent]
continue
@@ -189,8 +187,6 @@ class ShotMetadataSolver:
"entity_name": parent_name
})
self.log.debug(f"__ parents: {parents}")
return parents
def _create_hierarchy_path(self, parents):
@@ -297,7 +293,6 @@ class ShotMetadataSolver:
Returns:
(str, dict): shot name and hierarchy data
"""
self.log.info(f"_ source_data: {source_data}")
tasks = {}
asset_doc = source_data["selected_asset_doc"]


@@ -1,6 +1,5 @@
import os
from copy import deepcopy
from pprint import pformat
import opentimelineio as otio
from openpype.client import (
get_asset_by_name,
@@ -13,9 +12,7 @@ from openpype.hosts.traypublisher.api.plugin import (
from openpype.hosts.traypublisher.api.editorial import (
ShotMetadataSolver
)
from openpype.pipeline import CreatedInstance
from openpype.lib import (
get_ffprobe_data,
convert_ffprobe_fps_value,
@@ -70,14 +67,12 @@ class EditorialClipInstanceCreatorBase(HiddenTrayPublishCreator):
host_name = "traypublisher"
def create(self, instance_data, source_data=None):
self.log.info(f"instance_data: {instance_data}")
subset_name = instance_data["subset"]
# Create new instance
new_instance = CreatedInstance(
self.family, subset_name, instance_data, self
)
self.log.info(f"instance_data: {pformat(new_instance.data)}")
self._store_new_instance(new_instance)
@@ -223,8 +218,6 @@ or updating already created. Publishing will create OTIO file.
asset_name = instance_data["asset"]
asset_doc = get_asset_by_name(self.project_name, asset_name)
self.log.info(pre_create_data["fps"])
if pre_create_data["fps"] == "from_selection":
# get asset doc data attributes
fps = asset_doc["data"]["fps"]
@@ -239,34 +232,43 @@ or updating already created. Publishing will create OTIO file.
sequence_path_data = pre_create_data["sequence_filepath_data"]
media_path_data = pre_create_data["media_filepaths_data"]
sequence_path = self._get_path_from_file_data(sequence_path_data)
sequence_paths = self._get_path_from_file_data(
sequence_path_data, multi=True)
media_path = self._get_path_from_file_data(media_path_data)
# get otio timeline
otio_timeline = self._create_otio_timeline(
sequence_path, fps)
first_otio_timeline = None
for seq_path in sequence_paths:
# get otio timeline
otio_timeline = self._create_otio_timeline(
seq_path, fps)
# Create all clip instances
clip_instance_properties.update({
"fps": fps,
"parent_asset_name": asset_name,
"variant": instance_data["variant"]
})
# Create all clip instances
clip_instance_properties.update({
"fps": fps,
"parent_asset_name": asset_name,
"variant": instance_data["variant"]
})
# create clip instances
self._get_clip_instances(
otio_timeline,
media_path,
clip_instance_properties,
family_presets=allowed_family_presets
# create clip instances
self._get_clip_instances(
otio_timeline,
media_path,
clip_instance_properties,
allowed_family_presets,
os.path.basename(seq_path),
first_otio_timeline
)
)
if not first_otio_timeline:
# assign the first otio timeline so that following files are layered into it
first_otio_timeline = otio_timeline
# create otio editorial instance
self._create_otio_instance(
subset_name, instance_data,
sequence_path, media_path,
otio_timeline
subset_name,
instance_data,
seq_path, media_path,
first_otio_timeline
)
def _create_otio_instance(
@@ -317,14 +319,14 @@ or updating already created. Publishing will create OTIO file.
kwargs["rate"] = fps
kwargs["ignore_timecode_mismatch"] = True
self.log.info(f"kwargs: {kwargs}")
return otio.adapters.read_from_file(sequence_path, **kwargs)
def _get_path_from_file_data(self, file_path_data):
def _get_path_from_file_data(self, file_path_data, multi=False):
"""Converting creator path data to single path string
Args:
file_path_data (FileDefItem): creator path data inputs
multi (bool): switch to multiple files mode
Raises:
FileExistsError: in case nothing had been set
@ -332,23 +334,29 @@ or updating already created. Publishing will create OTIO file.
Returns:
str or list[str]: path string, or list of paths when 'multi' is enabled
"""
# TODO: just temporarily solving only one media file
if isinstance(file_path_data, list):
file_path_data = file_path_data.pop()
return_path_list = []
if len(file_path_data["filenames"]) == 0:
if isinstance(file_path_data, list):
return_path_list = [
os.path.join(f["directory"], f["filenames"][0])
for f in file_path_data
]
if not return_path_list:
raise FileExistsError(
f"File path was not added: {file_path_data}")
return os.path.join(
file_path_data["directory"], file_path_data["filenames"][0])
return return_path_list if multi else return_path_list[0]
def _get_clip_instances(
self,
otio_timeline,
media_path,
instance_data,
family_presets
family_presets,
sequence_file_name,
first_otio_timeline=None
):
"""Helping function fro creating clip instance
@@ -368,17 +376,15 @@ or updating already created. Publishing will create OTIO file.
media_data = self._get_media_source_metadata(media_path)
for track in tracks:
self.log.debug(f"track.name: {track.name}")
track.name = f"{sequence_file_name} - {otio_timeline.name}"
try:
track_start_frame = (
abs(track.source_range.start_time.value)
)
self.log.debug(f"track_start_frame: {track_start_frame}")
track_start_frame -= self.timeline_frame_start
except AttributeError:
track_start_frame = 0
self.log.debug(f"track_start_frame: {track_start_frame}")
for clip in track.each_child():
if not self._validate_clip_for_processing(clip):
@@ -400,10 +406,6 @@ or updating already created. Publishing will create OTIO file.
"instance_label": None,
"instance_id": None
}
self.log.info((
"Creating subsets from presets: \n"
f"{pformat(family_presets)}"
))
for _fpreset in family_presets:
# exclude audio family if no audio stream
@@ -419,7 +421,10 @@ or updating already created. Publishing will create OTIO file.
deepcopy(base_instance_data),
parenting_data
)
self.log.debug(f"{pformat(dict(instance.data))}")
# add track to first otioTimeline if it is in input args
if first_otio_timeline:
first_otio_timeline.tracks.append(deepcopy(track))
def _restore_otio_source_range(self, otio_clip):
"""Infusing source range.
@@ -460,7 +465,6 @@ or updating already created. Publishing will create OTIO file.
target_url=media_path,
available_range=available_range
)
otio_clip.media_reference = media_reference
def _get_media_source_metadata(self, path):
@@ -481,7 +485,6 @@ or updating already created. Publishing will create OTIO file.
media_data = get_ffprobe_data(
path, self.log
)
self.log.debug(f"__ media_data: {pformat(media_data)}")
# get video stream data
video_stream = media_data["streams"][0]
@@ -589,9 +592,6 @@ or updating already created. Publishing will create OTIO file.
# get variant name from preset or from inheritance
_variant_name = preset.get("variant") or variant_name
self.log.debug(f"__ family: {family}")
self.log.debug(f"__ preset: {preset}")
# subset name
subset_name = "{}{}".format(
family, _variant_name.capitalize()
@@ -722,17 +722,13 @@ or updating already created. Publishing will create OTIO file.
clip_in += track_start_frame
clip_out = otio_clip.range_in_parent().end_time_inclusive().value
clip_out += track_start_frame
self.log.info(f"clip_in: {clip_in} | clip_out: {clip_out}")
# add offset in case there is any
self.log.debug(f"__ timeline_offset: {timeline_offset}")
if timeline_offset:
clip_in += timeline_offset
clip_out += timeline_offset
clip_duration = otio_clip.duration().value
self.log.info(f"clip duration: {clip_duration}")
source_in = otio_clip.trimmed_range().start_time.value
source_out = source_in + clip_duration
@@ -762,7 +758,6 @@ or updating already created. Publishing will create OTIO file.
Returns:
list: lit of dict with preset items
"""
self.log.debug(f"__ pre_create_data: {pre_create_data}")
return [
{"family": "shot"},
*[
@@ -833,7 +828,7 @@ or updating already created. Publishing will create OTIO file.
".fcpxml"
],
allow_sequences=False,
single_item=True,
single_item=False,
label="Sequence file",
),
FileDef(


@@ -33,8 +33,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
]
def process(self, instance):
self.log.debug(pformat(instance.data))
creator_identifier = instance.data["creator_identifier"]
if "editorial" not in creator_identifier:
return
@@ -82,7 +80,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
]
otio_clip = clips.pop()
self.log.debug(f"__ otioclip.parent: {otio_clip.parent}")
return otio_clip
@@ -172,7 +169,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
}
parents = instance.data.get('parents', [])
self.log.debug(f"parents: {pformat(parents)}")
actual = {name: in_info}
@@ -190,7 +186,6 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
# adding hierarchy context to instance
context.data["hierarchyContext"] = final_context
self.log.debug(pformat(final_context))
def _update_dict(self, ex_dict, new_dict):
""" Recursion function


@@ -18,6 +18,7 @@ from .pipeline import (
show_tools_popup,
instantiate,
UnrealHost,
maintained_selection
)
__all__ = [
@@ -36,4 +37,5 @@ __all__ = [
"show_tools_popup",
"instantiate",
"UnrealHost",
"maintained_selection"
]


@@ -2,6 +2,7 @@
import os
import logging
from typing import List
from contextlib import contextmanager
import semver
import pyblish.api
@@ -447,3 +448,16 @@ def get_subsequences(sequence: unreal.LevelSequence):
if subscene_track is not None and subscene_track.get_sections():
return subscene_track.get_sections()
return []
@contextmanager
def maintained_selection():
"""Stub to be either implemented or replaced.
This is needed for the old publisher implementation, but
it is not supported (yet) in UE.
"""
try:
yield
finally:
pass
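
Call sites can already wrap selection-changing operations in the stub, so they stay correct once UE gains support. A sketch of what a working implementation could look like; the get_selection/set_selection helpers are hypothetical stand-ins for the missing UE API:

from contextlib import contextmanager

state = {"selection": ["/Game/Asset_A"]}  # stand-in for UE's selection

def get_selection():
    return list(state["selection"])

def set_selection(items):
    state["selection"] = list(items)

@contextmanager
def maintained_selection_sketch():
    previous = get_selection()
    try:
        yield
    finally:
        # restore whatever was selected before the block ran
        set_selection(previous)

with maintained_selection_sketch():
    state["selection"] = []  # selection changes inside the block
print(state["selection"])  # ['/Game/Asset_A'], restored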


@@ -0,0 +1,61 @@
"""Create UAsset."""
from pathlib import Path
import unreal
from openpype.hosts.unreal.api import pipeline
from openpype.pipeline import LegacyCreator
class CreateUAsset(LegacyCreator):
"""UAsset."""
name = "UAsset"
label = "UAsset"
family = "uasset"
icon = "cube"
root = "/Game/OpenPype"
suffix = "_INS"
def __init__(self, *args, **kwargs):
super(CreateUAsset, self).__init__(*args, **kwargs)
def process(self):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
subset = self.data["subset"]
path = f"{self.root}/PublishInstances/"
unreal.EditorAssetLibrary.make_directory(path)
selection = []
if (self.options or {}).get("useSelection"):
sel_objects = unreal.EditorUtilityLibrary.get_selected_assets()
selection = [a.get_path_name() for a in sel_objects]
if len(selection) != 1:
raise RuntimeError("Please select only one object.")
obj = selection[0]
asset = ar.get_asset_by_object_path(obj).get_asset()
sys_path = unreal.SystemLibrary.get_system_path(asset)
if not sys_path:
raise RuntimeError(
f"{Path(obj).name} is not on the disk. Likely it needs to"
"be saved first.")
if Path(sys_path).suffix != ".uasset":
raise RuntimeError(f"{Path(sys_path).name} is not a UAsset.")
unreal.log("selection: {}".format(selection))
container_name = f"{subset}{self.suffix}"
pipeline.create_publish_instance(
instance=container_name, path=path)
data = self.data.copy()
data["members"] = selection
pipeline.imprint(f"{path}/{container_name}", data)


@@ -0,0 +1,145 @@
# -*- coding: utf-8 -*-
"""Load UAsset."""
from pathlib import Path
import shutil
from openpype.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID
)
from openpype.hosts.unreal.api import plugin
from openpype.hosts.unreal.api import pipeline as unreal_pipeline
import unreal # noqa
class UAssetLoader(plugin.Loader):
"""Load UAsset."""
families = ["uasset"]
label = "Load UAsset"
representations = ["uasset"]
icon = "cube"
color = "orange"
def load(self, context, name, namespace, options):
"""Load and containerise representation into Content Browser.
Args:
context (dict): application context
name (str): subset name
namespace (str): in Unreal this is basically path to container.
This is not passed here, so namespace is set
by `containerise()` because only then we know
real path.
options (dict): Those would be data to be imprinted. This is not
used now, data are imprinted by `containerise()`.
Returns:
list(str): list of container content
"""
# Create directory for asset and OpenPype container
root = "/Game/OpenPype/Assets"
asset = context.get('asset').get('name')
suffix = "_CON"
if asset:
asset_name = "{}_{}".format(asset, name)
else:
asset_name = "{}".format(name)
tools = unreal.AssetToolsHelpers().get_asset_tools()
asset_dir, container_name = tools.create_unique_asset_name(
"{}/{}/{}".format(root, asset, name), suffix="")
container_name += suffix
unreal.EditorAssetLibrary.make_directory(asset_dir)
destination_path = asset_dir.replace(
"/Game",
Path(unreal.Paths.project_content_dir()).as_posix(),
1)
shutil.copy(self.fname, f"{destination_path}/{name}.uasset")
# Create Asset Container
unreal_pipeline.create_container(
container=container_name, path=asset_dir)
data = {
"schema": "openpype:container-2.0",
"id": AVALON_CONTAINER_ID,
"asset": asset,
"namespace": asset_dir,
"container_name": container_name,
"asset_name": asset_name,
"loader": str(self.__class__.__name__),
"representation": context["representation"]["_id"],
"parent": context["representation"]["parent"],
"family": context["representation"]["context"]["family"]
}
unreal_pipeline.imprint(
"{}/{}".format(asset_dir, container_name), data)
asset_content = unreal.EditorAssetLibrary.list_assets(
asset_dir, recursive=True, include_folder=True
)
for a in asset_content:
unreal.EditorAssetLibrary.save_asset(a)
return asset_content
def update(self, container, representation):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
asset_dir = container["namespace"]
name = representation["context"]["subset"]
destination_path = asset_dir.replace(
"/Game",
Path(unreal.Paths.project_content_dir()).as_posix(),
1)
asset_content = unreal.EditorAssetLibrary.list_assets(
asset_dir, recursive=False, include_folder=True
)
for asset in asset_content:
obj = ar.get_asset_by_object_path(asset).get_asset()
if not obj.get_class().get_name() == 'AssetContainer':
unreal.EditorAssetLibrary.delete_asset(asset)
update_filepath = get_representation_path(representation)
shutil.copy(update_filepath, f"{destination_path}/{name}.uasset")
container_path = "{}/{}".format(container["namespace"],
container["objectName"])
# update metadata
unreal_pipeline.imprint(
container_path,
{
"representation": str(representation["_id"]),
"parent": str(representation["parent"])
})
asset_content = unreal.EditorAssetLibrary.list_assets(
asset_dir, recursive=True, include_folder=True
)
for a in asset_content:
unreal.EditorAssetLibrary.save_asset(a)
def remove(self, container):
path = container["namespace"]
parent_path = Path(path).parent.as_posix()
unreal.EditorAssetLibrary.delete_directory(path)
asset_content = unreal.EditorAssetLibrary.list_assets(
parent_path, recursive=False
)
if len(asset_content) == 0:
unreal.EditorAssetLibrary.delete_directory(parent_path)
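
Both `load` and `update` above turn the asset's `/Game` path into an on-disk location with a single prefix replace, since Unreal mounts `/Game` at the project's Content directory. A standalone sketch with made-up paths:

from pathlib import Path

asset_dir = "/Game/OpenPype/Assets/hero/uassetMain"
# Made-up value; the loader gets this from unreal.Paths.project_content_dir().
project_content_dir = "C:/UnrealProjects/MyProject/Content"

destination_path = asset_dir.replace(
    "/Game", Path(project_content_dir).as_posix(), 1)
print(destination_path)
# C:/UnrealProjects/MyProject/Content/OpenPype/Assets/hero/uassetMain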


@@ -25,9 +25,13 @@ class CollectInstances(pyblish.api.ContextPlugin):
def process(self, context):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
class_name = ["/Script/OpenPype",
"AssetContainer"] if UNREAL_VERSION.major == 5 and \
UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa
class_name = [
"/Script/OpenPype",
"OpenPypePublishInstance"
] if (
UNREAL_VERSION.major == 5
and UNREAL_VERSION.minor > 0
) else "OpenPypePublishInstance" # noqa
instance_containers = ar.get_assets_by_class(class_name, True)
for container_data in instance_containers:


@@ -0,0 +1,42 @@
from pathlib import Path
import shutil
import unreal
from openpype.pipeline import publish
class ExtractUAsset(publish.Extractor):
"""Extract a UAsset."""
label = "Extract UAsset"
hosts = ["unreal"]
families = ["uasset"]
optional = True
def process(self, instance):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
self.log.info("Performing extraction..")
staging_dir = self.staging_dir(instance)
filename = "{}.uasset".format(instance.name)
obj = instance[0]
asset = ar.get_asset_by_object_path(obj).get_asset()
sys_path = unreal.SystemLibrary.get_system_path(asset)
filename = Path(sys_path).name
shutil.copy(sys_path, staging_dir)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'uasset',
'ext': 'uasset',
'files': filename,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)


@@ -0,0 +1,41 @@
import unreal
import pyblish.api
class ValidateNoDependencies(pyblish.api.InstancePlugin):
"""Ensure that the uasset has no dependencies
The uasset is checked for dependencies. If there are any, the instance
cannot be published.
"""
order = pyblish.api.ValidatorOrder
label = "Check no dependencies"
families = ["uasset"]
hosts = ["unreal"]
optional = True
def process(self, instance):
ar = unreal.AssetRegistryHelpers.get_asset_registry()
all_dependencies = []
for obj in instance[:]:
asset = ar.get_asset_by_object_path(obj)
dependencies = ar.get_dependencies(
asset.package_name,
unreal.AssetRegistryDependencyOptions(
include_soft_package_references=False,
include_hard_package_references=True,
include_searchable_names=False,
include_soft_management_references=False,
include_hard_management_references=False
))
if dependencies:
for dep in dependencies:
if str(dep).startswith("/Game/"):
all_dependencies.append(str(dep))
if all_dependencies:
raise RuntimeError(
f"Dependencies found: {all_dependencies}")


@@ -92,7 +92,9 @@ class FileTransaction(object):
def process(self):
# Backup any existing files
for dst, (src, _) in self._transfers.items():
if dst == src or not os.path.exists(dst):
self.log.debug("Checking file ... {} -> {}".format(src, dst))
path_same = self._same_paths(src, dst)
if path_same or not os.path.exists(dst):
continue
# Backup original file
@@ -105,7 +107,8 @@ class FileTransaction(object):
# Copy the files to transfer
for dst, (src, opts) in self._transfers.items():
if dst == src:
path_same = self._same_paths(src, dst)
if path_same:
self.log.debug(
"Source and destionation are same files {} -> {}".format(
src, dst))
@@ -182,3 +185,10 @@ class FileTransaction(object):
else:
self.log.critical("An unexpected error occurred.")
six.reraise(*sys.exc_info())
def _same_paths(self, src, dst):
# handles paths that differ only in case, e.g. C:/project vs c:/project
if os.path.exists(src) and os.path.exists(dst):
return os.path.samefile(src, dst)
return src == dst
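
For context, os.path.samefile compares the files' stat info rather than the path strings, which is what lets it treat C:/project and c:/project as equal on Windows. A quick runnable illustration that reaches one file through two different strings:

import os
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
try:
    # Same file addressed through an extra "." path segment.
    alias = os.path.join(os.path.dirname(path), ".", os.path.basename(path))
    print(path == alias)                  # False: plain string comparison
    print(os.path.samefile(path, alias))  # True: both stat to the same file
finally:
    os.remove(path)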


@@ -400,6 +400,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
import_reference = False
use_published = True
asset_dependencies = False
@@ -424,7 +425,11 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
file_path = None
if self.use_published:
file_path = self.from_published_scene()
if not self.import_reference:
file_path = self.from_published_scene()
else:
self.log.info("use the scene with imported reference for rendering") # noqa
file_path = context.data["currentFile"]
# fallback if nothing was set
if not file_path:
@@ -516,7 +521,6 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
published.
"""
instance = self._instance
workfile_instance = self._get_workfile_instance(instance.context)
if workfile_instance is None:
@@ -524,7 +528,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
# determine published path from Anatomy.
template_data = workfile_instance.data.get("anatomyData")
rep = workfile_instance.data.get("representations")[0]
rep = workfile_instance.data["representations"][0]
template_data["representation"] = rep.get("name")
template_data["ext"] = rep.get("ext")
template_data["comment"] = None


@@ -1112,17 +1112,21 @@ class RootItem(FormatObject):
result = False
output = str(path)
root_paths = list(self.cleaned_data.values())
mod_path = self.clean_path(path)
for root_path in root_paths:
for root_os, root_path in self.cleaned_data.items():
# Skip empty paths
if not root_path:
continue
if mod_path.startswith(root_path):
_mod_path = mod_path # reset to original cleaned value
if root_os == "windows":
root_path = root_path.lower()
_mod_path = _mod_path.lower()
if _mod_path.startswith(root_path):
result = True
replacement = "{" + self.full_key() + "}"
output = replacement + mod_path[len(root_path):]
output = replacement + _mod_path[len(root_path):]
break
return (result, output)
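
The fix above lowercases both sides only for the windows root, because only Windows paths are case-insensitive. A standalone sketch of the prefix-to-token replacement, with made-up root values:

def replace_root(path, cleaned_data, key="root[work]"):
    """Return (matched, path with the matched root replaced by a token)."""
    for root_os, root_path in cleaned_data.items():
        if not root_path:
            continue
        mod_path = path
        if root_os == "windows":
            # Windows paths are case-insensitive; compare lowercased.
            root_path = root_path.lower()
            mod_path = mod_path.lower()
        if mod_path.startswith(root_path):
            return True, "{" + key + "}" + mod_path[len(root_path):]
    return False, path

roots = {"windows": "C:/projects", "linux": "/mnt/projects"}
print(replace_root("c:/projects/ep01/sh010", roots))
# (True, '{root[work]}/ep01/sh010')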


@@ -4,8 +4,6 @@ import os
import shutil
import pyblish.api
from openpype.pipeline import legacy_io
class CleanUpFarm(pyblish.api.ContextPlugin):
"""Cleans up the staging directory after a successful publish.
@@ -23,8 +21,8 @@ class CleanUpFarm(pyblish.api.ContextPlugin):
def process(self, context):
# Get source host from which farm publishing was started
src_host_name = legacy_io.Session.get("AVALON_APP")
self.log.debug("Host name from session is {}".format(src_host_name))
src_host_name = context.data["hostName"]
self.log.debug("Host name from context is {}".format(src_host_name))
# Skip process if is not in list of source hosts in which this
# plugin should run
if src_host_name not in self.allowed_hosts:


@@ -32,7 +32,6 @@ from openpype.client import (
get_subsets,
get_last_versions
)
from openpype.pipeline import legacy_io
class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
@@ -49,7 +48,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
def process(self, context):
self.log.info("Collecting anatomy data for all instances.")
project_name = legacy_io.active_project()
project_name = context.data["projectName"]
self.fill_missing_asset_docs(context, project_name)
self.fill_instance_data_from_asset(context)
self.fill_latest_versions(context, project_name)


@@ -15,7 +15,11 @@ import pyblish.api
class CollectResourcesPath(pyblish.api.InstancePlugin):
"""Generate directory path where the files and resources will be stored"""
"""Generate directory path where the files and resources will be stored.
Collects folder name and file name from files, if they exist, for
in-situ publishing.
"""
label = "Collect Resources Path"
order = pyblish.api.CollectorOrder + 0.495


@@ -1,10 +1,7 @@
import pyblish.api
from openpype.client import get_representations
from openpype.pipeline import (
registered_host,
legacy_io,
)
from openpype.pipeline import registered_host
class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
@@ -44,7 +41,7 @@ class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
for container in containers
}
project_name = legacy_io.active_project()
project_name = context.data["projectName"]
repre_docs = get_representations(
project_name,
representation_ids=repre_ids,


@@ -0,0 +1,42 @@
"""
Requires:
instance -> currentFile
instance -> source
Provides:
instance -> originalBasename
instance -> originalDirname
"""
import os
import pyblish.api
class CollectSourceForSource(pyblish.api.InstancePlugin):
"""Collects source location of file for instance.
Used for 'source' template name which handles in place publishing.
For this kind of publishing files are present with correct file name
pattern and correct location.
"""
label = "Collect Source"
order = pyblish.api.CollectorOrder + 0.495
def process(self, instance):
# parse folder name and file name for online and source templates
# currentFile comes from hosts workfiles
# source comes from Publisher
current_file = instance.data.get("currentFile")
source = instance.data.get("source")
source_file = current_file or source
if source_file:
self.log.debug("Parsing paths for {}".format(source_file))
if not instance.data.get("originalBasename"):
instance.data["originalBasename"] = \
os.path.basename(source_file)
if not instance.data.get("originalDirname"):
instance.data["originalDirname"] = \
os.path.dirname(source_file)
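
Given a workfile path, the collector just splits it into the two keys the 'source' template consumes (the integrator later strips the extension from 'originalBasename' before formatting). For example, with a made-up path:

import os

source_file = "C:/projects/ep01/sh010/plate_v001.mov"  # made-up workfile path
data = {}
data.setdefault("originalBasename", os.path.basename(source_file))
data.setdefault("originalDirname", os.path.dirname(source_file))
print(data)
# {'originalBasename': 'plate_v001.mov',
#  'originalDirname': 'C:/projects/ep01/sh010'}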


@@ -91,7 +91,7 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
full_input_path = os.path.join(src_staging, input_file)
self.log.info("input {}".format(full_input_path))
filename = os.path.splitext(input_file)[0]
jpeg_file = filename + ".jpg"
jpeg_file = filename + "_thumb.jpg"
full_output_path = os.path.join(dst_staging, jpeg_file)
if oiio_supported:


@@ -100,7 +100,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
self.log.info("Thumbnail source: {}".format(thumbnail_source))
src_basename = os.path.basename(thumbnail_source)
dst_filename = os.path.splitext(src_basename)[0] + ".jpg"
dst_filename = os.path.splitext(src_basename)[0] + "_thumb.jpg"
full_output_path = os.path.join(dst_staging, dst_filename)
if oiio_supported:


@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Source directory not collected</title>
<description>
## Source directory not collected
Instance is marked for in-place publishing, so its 'originalDirname' must be collected. Contact an OP developer to modify the collector.
</description>
<detail>
### __Detailed Info__ (optional)
In-place publishing uses the source directory and file name in the resulting path and file name of the published item. For this instance
the required metadata wasn't filled in. This error is not recoverable unless the instance itself is removed;
the collector for this instance must be updated for the instance to be published.
</detail>
</error>
<error id="not_in_dir">
<title>Source file not in project dir</title>
<description>
## Source file not in project dir
Path '{original_dirname}' is not in the project folder. Please publish from inside the project folder.
### How to repair?
Restart publishing after you have moved the source file into the project directory.
</description>
</error>
</root>


@@ -25,7 +25,6 @@ from openpype.client import (
)
from openpype.lib import source_hash
from openpype.lib.file_transaction import FileTransaction
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import (
KnownPublishError,
get_publish_template_name,
@@ -132,7 +131,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"mvUsdComposition",
"mvUsdOverride",
"simpleUnrealTexture",
"online"
"online",
"uasset"
]
default_template_name = "publish"
@@ -244,7 +244,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
return filtered_repres
def register(self, instance, file_transactions, filtered_repres):
project_name = legacy_io.active_project()
project_name = instance.context.data["projectName"]
instance_stagingdir = instance.data.get("stagingDir")
if not instance_stagingdir:
@@ -270,6 +270,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
)
instance.data["versionEntity"] = version
anatomy = instance.context.data["anatomy"]
# Get existing representations (if any)
existing_repres_by_name = {
repre_doc["name"].lower(): repre_doc
@@ -303,13 +305,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# .ma representation. Those destination paths are pre-defined, etc.
# todo: should we move or simplify this logic?
resource_destinations = set()
for src, dst in instance.data.get("transfers", []):
file_transactions.add(src, dst, mode=FileTransaction.MODE_COPY)
resource_destinations.add(os.path.abspath(dst))
for src, dst in instance.data.get("hardlinks", []):
file_transactions.add(src, dst, mode=FileTransaction.MODE_HARDLINK)
resource_destinations.add(os.path.abspath(dst))
file_copy_modes = [
("transfers", FileTransaction.MODE_COPY),
("hardlinks", FileTransaction.MODE_HARDLINK)
]
for files_type, copy_mode in file_copy_modes:
for src, dst in instance.data.get(files_type, []):
self._validate_path_in_project_roots(anatomy, dst)
file_transactions.add(src, dst, mode=copy_mode)
resource_destinations.add(os.path.abspath(dst))
# Bulk write to the database
# We write the subset and version to the database before the File
@@ -342,7 +348,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# Compute the resource file infos once (files belonging to the
# version instance instead of an individual representation) so
# we can re-use those file infos per representation
anatomy = instance.context.data["anatomy"]
resource_file_infos = self.get_files_info(resource_destinations,
sites=sites,
anatomy=anatomy)
@@ -529,6 +534,20 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_data["representation"] = repre["name"]
template_data["ext"] = repre["ext"]
stagingdir = repre.get("stagingDir")
if not stagingdir:
# Fall back to instance staging dir if not explicitly
# set for representation in the instance
self.log.debug((
"Representation uses instance staging dir: {}"
).format(instance_stagingdir))
stagingdir = instance_stagingdir
if not stagingdir:
raise KnownPublishError(
"No staging directory set for representation: {}".format(repre)
)
# optionals
# retrieve additional anatomy data from representation if exists
for key, anatomy_key in {
@@ -548,20 +567,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
if value is not None:
template_data[anatomy_key] = value
stagingdir = repre.get("stagingDir")
if not stagingdir:
# Fall back to instance staging dir if not explicitly
# set for representation in the instance
self.log.debug((
"Representation uses instance staging dir: {}"
).format(instance_stagingdir))
stagingdir = instance_stagingdir
if not stagingdir:
raise KnownPublishError(
"No staging directory set for representation: {}".format(repre)
)
self.log.debug("Anatomy template name: {}".format(template_name))
anatomy = instance.context.data["anatomy"]
publish_template_category = anatomy.templates[template_name]
@@ -569,6 +574,25 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
is_udim = bool(repre.get("udim"))
# handle publish in place
if "originalDirname" in template:
# store as originalDirname only the original value without the project root
# if the instance collected 'originalDirname' it should be used
# for all representations, from temp to final
original_directory = (
instance.data.get("originalDirname") or instance_stagingdir)
_rootless = self.get_rootless_path(anatomy, original_directory)
if _rootless == original_directory:
raise KnownPublishError((
"Destination path '{}' ".format(original_directory) +
"must be in project dir"
))
relative_path_start = _rootless.rfind('}') + 2
without_root = _rootless[relative_path_start:]
template_data["originalDirname"] = without_root
is_sequence_representation = isinstance(files, (list, tuple))
if is_sequence_representation:
# Collection of files (sequence)
@@ -587,6 +611,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
))
src_collection = src_collections[0]
template_data["originalBasename"] = src_collection.head[:-1]
destination_indexes = list(src_collection.indexes)
# Use last frame for minimum padding
# - that should cover both 'udim' and 'frame' minimum padding
@@ -671,12 +696,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
raise KnownPublishError(
"This is a bug. Representation file name is full path"
)
template_data["originalBasename"], _ = os.path.splitext(fname)
# Manage anatomy template data
template_data.pop("frame", None)
if is_udim:
template_data["udim"] = repre["udim"][0]
# Construct destination filepath from template
anatomy_filled = anatomy.format(template_data)
template_filled = anatomy_filled[template_name]["path"]
@@ -805,11 +829,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"""Return anatomy template name to use for integration"""
# Anatomy data is pre-filled by Collectors
project_name = legacy_io.active_project()
context = instance.context
project_name = context.data["projectName"]
# Task can be optional in anatomy data
host_name = instance.context.data["hostName"]
host_name = context.data["hostName"]
anatomy_data = instance.data["anatomyData"]
family = anatomy_data["family"]
task_info = anatomy_data.get("task") or {}
@@ -820,7 +844,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
family,
task_name=task_info.get("name"),
task_type=task_info.get("type"),
project_settings=instance.context.data["project_settings"],
project_settings=context.data["project_settings"],
logger=self.log
)
@@ -890,3 +914,21 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"hash": source_hash(path),
"sites": sites
}
def _validate_path_in_project_roots(self, anatomy, file_path):
"""Checks if 'file_path' starts with any of the roots.
Used to check that the published path belongs to the project, e.g. that
we are not trying to publish to a local-only folder.
Args:
anatomy (Anatomy)
file_path (str)
Raises
(KnownPublishError)
"""
path = self.get_rootless_path(anatomy, file_path)
if not path:
raise KnownPublishError((
"Destination path '{}' ".format(file_path) +
"must be in project dir"
))


@@ -2,7 +2,6 @@ from pprint import pformat
import pyblish.api
from openpype.pipeline import legacy_io
from openpype.client import get_assets
@@ -28,10 +27,7 @@ class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
asset_and_parents = self.get_parents(context)
self.log.debug("__ asset_and_parents: {}".format(asset_and_parents))
if not legacy_io.Session:
legacy_io.install()
project_name = legacy_io.active_project()
project_name = context.data["projectName"]
db_assets = list(get_assets(
project_name, fields=["name", "data.parents"]
))


@@ -0,0 +1,74 @@
import pyblish.api
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline.publish import (
PublishXmlValidationError,
get_publish_template_name,
)
class ValidatePublishDir(pyblish.api.InstancePlugin):
"""Validates if 'publishDir' is a project directory
'publishDir' is collected based on publish templates. In specific cases
('source' template) source folder of items is used as a 'publishDir', this
validates if it is inside any project dir for the project.
(eg. files are not published from local folder, unaccessible for studio'
"""
order = ValidateContentsOrder
label = "Validate publish dir"
checked_template_names = ["source"]
# validated instances might have an interim family that needs to be mapped to the final one
family_mapping = {
"renderLayer": "render",
"renderLocal": "render"
}
def process(self, instance):
template_name = self._get_template_name_from_instance(instance)
if template_name not in self.checked_template_names:
return
original_dirname = instance.data.get("originalDirname")
if not original_dirname:
raise PublishXmlValidationError(
self,
"Instance meant for in place publishing."
" Its 'originalDirname' must be collected."
" Contact OP developer to modify collector."
)
anatomy = instance.context.data["anatomy"]
success, _ = anatomy.find_root_template_from_path(original_dirname)
formatting_data = {
"original_dirname": original_dirname,
}
msg = "Path '{}' not in project folder.".format(original_dirname) + \
" Please publish from inside of project folder."
if not success:
raise PublishXmlValidationError(self, msg, key="not_in_dir",
formatting_data=formatting_data)
def _get_template_name_from_instance(self, instance):
project_name = instance.context.data["projectName"]
host_name = instance.context.data["hostName"]
anatomy_data = instance.data["anatomyData"]
family = anatomy_data["family"]
family = self.family_mapping.get(family) or family
task_info = anatomy_data.get("task") or {}
return get_publish_template_name(
project_name,
host_name,
family,
task_name=task_info.get("name"),
task_type=task_info.get("type"),
project_settings=instance.context.data["project_settings"],
logger=self.log
)


@@ -53,11 +53,17 @@
"file": "{originalBasename}<.{@frame}><_{udim}>.{ext}",
"path": "{@folder}/{@file}"
},
"source": {
"folder": "{root[work]}/{originalDirname}",
"file": "{originalBasename}<.{@frame}><_{udim}>.{ext}",
"path": "{@folder}/{@file}"
},
"__dynamic_keys_labels__": {
"maya2unreal": "Maya to Unreal",
"simpleUnrealTextureHero": "Simple Unreal Texture - Hero",
"simpleUnrealTexture": "Simple Unreal Texture",
"online": "online"
"online": "online",
"source": "source"
}
}
}
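
For reference, the new "source" template resolves by plain key substitution; a sketch with made-up values, leaving out the optional <.{@frame}> and <_{udim}> parts:

template = "{root[work]}/{originalDirname}/{originalBasename}.{ext}"
data = {
    "root": {"work": "P:/projects"},  # made-up project root
    "originalDirname": "ep01/sh010/plates",
    "originalBasename": "plate_v001",
    "ext": "mov",
}
print(template.format(**data))
# P:/projects/ep01/sh010/plates/plate_v001.mov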


@@ -25,6 +25,7 @@
"active": true,
"tile_assembler_plugin": "OpenPypeTileAssembler",
"use_published": true,
"import_reference": false,
"asset_dependencies": true,
"priority": 50,
"tile_priority": 50,


@@ -130,6 +130,11 @@
"key": "use_published",
"label": "Use Published scene"
},
{
"type": "boolean",
"key": "import_reference",
"label": "Use Scene with Imported Reference"
},
{
"type": "boolean",
"key": "asset_dependencies",


@@ -4740,9 +4740,9 @@ json-schema-traverse@^1.0.0:
integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
json5@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe"
integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==
version "1.0.2"
resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593"
integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==
dependencies:
minimist "^1.2.0"
@@ -5154,16 +5154,11 @@ minimatch@^3.0.4:
dependencies:
brace-expansion "^1.1.7"
minimist@^1.2.0:
minimist@^1.2.0, minimist@^1.2.5:
version "1.2.7"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18"
integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==
minimist@^1.2.5:
version "1.2.6"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"
integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==
mkdirp@^0.5.5:
version "0.5.5"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"