Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
[Automated] Merged develop into main
Commit 679a2c852e
15 changed files with 158 additions and 103 deletions
.github/workflows/prerelease.yml (vendored), 14 lines changed

@@ -62,7 +62,19 @@ jobs:
       - name: "🖨️ Print changelog to console"
         if: steps.version_type.outputs.type != 'skip'
         run: cat CHANGELOG.md

+      - name: 💾 Commit and Tag
+        id: git_commit
+        if: steps.version_type.outputs.type != 'skip'
+        run: |
+          git config user.email ${{ secrets.CI_EMAIL }}
+          git config user.name ${{ secrets.CI_USER }}
+          git add .
+          git commit -m "[Automated] Bump version"
+          tag_name="CI/${{ steps.version.outputs.next_tag }}"
+          echo $tag_name
+          git tag -a $tag_name -m "nightly build"
+
       - name: Push to protected main branch
         uses: CasperWA/push-protected@v2.10.0
         with:

@@ -151,7 +151,7 @@ def create_otio_reference(clip):
     padding = media_source.filenamePadding()
     file_head = media_source.filenameHead()
     is_sequence = not media_source.singleFile()
-    frame_duration = media_source.duration()
+    frame_duration = media_source.duration() - 1
     fps = utils.get_rate(clip) or self.project_fps
     extension = os.path.splitext(path)[-1]

@@ -296,6 +296,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                 continue
+            if otio_clip.name not in track_item.name():
+                continue
             self.log.debug("__ parent_range: {}".format(parent_range))
             self.log.debug("__ timeline_range: {}".format(timeline_range))
             if openpype.lib.is_overlapping_otio_ranges(
                     parent_range, timeline_range, strict=True):

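The strict overlap test above comes from openpype.lib.is_overlapping_otio_ranges. As a hedged illustration only (not the plugin's actual helper), the standalone sketch below shows what a strict, fully-contained check versus a loose, any-intersection check could look like with plain OpenTimelineIO ranges; the helper name and the sample frame values are assumptions.

    # Illustrative sketch only; not openpype.lib's implementation.
    from opentimelineio.opentime import RationalTime, TimeRange

    def is_overlapping(test_range, main_range, strict=False):
        if strict:
            # strict: test_range must sit fully inside main_range
            return (
                test_range.start_time >= main_range.start_time
                and test_range.end_time_exclusive() <= main_range.end_time_exclusive()
            )
        # loose: any intersection counts
        return (
            test_range.start_time < main_range.end_time_exclusive()
            and test_range.end_time_exclusive() > main_range.start_time
        )

    parent_range = TimeRange(RationalTime(100, 25), RationalTime(50, 25))    # frames 100-149
    timeline_range = TimeRange(RationalTime(110, 25), RationalTime(20, 25))  # frames 110-129
    print(is_overlapping(timeline_range, parent_range, strict=True))         # True
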
@@ -1,7 +1,7 @@
 """A module containing generic loader actions that will display in the Loader.

 """
-
+import qargparse
 from openpype.pipeline import load
 from openpype.hosts.maya.api.lib import (
     maintained_selection,

@@ -98,6 +98,15 @@ class ImportMayaLoader(load.LoaderPlugin):
     icon = "arrow-circle-down"
     color = "#775555"

+    options = [
+        qargparse.Boolean(
+            "clean_import",
+            label="Clean import",
+            default=False,
+            help="Should all occurences of cbId be purged?"
+        )
+    ]
+
     def load(self, context, name=None, namespace=None, data=None):
         import maya.cmds as cmds

@@ -114,13 +123,22 @@ class ImportMayaLoader(load.LoaderPlugin):
         )

         with maintained_selection():
-            cmds.file(self.fname,
-                      i=True,
-                      preserveReferences=True,
-                      namespace=namespace,
-                      returnNewNodes=True,
-                      groupReference=True,
-                      groupName="{}:{}".format(namespace, name))
+            nodes = cmds.file(self.fname,
+                              i=True,
+                              preserveReferences=True,
+                              namespace=namespace,
+                              returnNewNodes=True,
+                              groupReference=True,
+                              groupName="{}:{}".format(namespace, name))

+        if data.get("clean_import", False):
+            remove_attributes = ["cbId"]
+            for node in nodes:
+                for attr in remove_attributes:
+                    if cmds.attributeQuery(attr, node=node, exists=True):
+                        full_attr = "{}.{}".format(node, attr)
+                        print("Removing {}".format(full_attr))
+                        cmds.deleteAttr(full_attr)
+
         # We do not containerize imported content, it remains unmanaged
         return

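The clean_import option drives the cbId purge inside load(). A minimal sketch of the same purge, runnable on its own in Maya's Script Editor; the namespace query in the usage comment is an assumed example, not part of the loader:

    # Hedged sketch of the cbId purge; the example node query below is assumed.
    from maya import cmds

    def purge_attribute(nodes, attr="cbId"):
        for node in nodes:
            if cmds.attributeQuery(attr, node=node, exists=True):
                full_attr = "{}.{}".format(node, attr)
                print("Removing {}".format(full_attr))
                cmds.deleteAttr(full_attr)

    # e.g. purge everything imported under an assumed namespace:
    # purge_attribute(cmds.ls("myAsset_01:*", long=True))
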
@@ -39,6 +39,9 @@ class CollectBatchData(pyblish.api.ContextPlugin):
     def process(self, context):
         self.log.info("CollectBatchData")
         batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
+        if os.environ.get("IS_TEST"):
+            self.log.debug("Automatic testing, no batch data, skipping")
+            return

         assert batch_dir, (
             "Missing `OPENPYPE_PUBLISH_DATA`")

@@ -5,6 +5,7 @@ import openpype.api
 from openpype.lib import (
     get_ffmpeg_tool_path,
     get_ffprobe_streams,
+    path_to_subprocess_arg,
 )

@@ -37,82 +38,69 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
if not thumbnail_repre:
return

thumbnail_repre.pop("thumbnail")
files = thumbnail_repre.get("files")
if not files:
return

if isinstance(files, list):
files_len = len(files)
file = str(files[0])
first_filename = str(files[0])
else:
files_len = 1
file = files
first_filename = files

staging_dir = None
is_jpeg = False
if file.endswith(".jpeg") or file.endswith(".jpg"):
is_jpeg = True

if is_jpeg and files_len == 1:
# skip if already is single jpeg file
return
# Convert to jpeg if not yet
full_input_path = os.path.join(
thumbnail_repre["stagingDir"], first_filename
)
self.log.info("input {}".format(full_input_path))
with tempfile.NamedTemporaryFile(suffix=".jpg") as tmp:
full_thumbnail_path = tmp.name

elif is_jpeg:
# use first frame as thumbnail if is sequence of jpegs
full_thumbnail_path = os.path.join(
thumbnail_repre["stagingDir"], file
)
self.log.info(
"For thumbnail is used file: {}".format(full_thumbnail_path)
)
self.log.info("output {}".format(full_thumbnail_path))

else:
# Convert to jpeg if not yet
full_input_path = os.path.join(thumbnail_repre["stagingDir"], file)
self.log.info("input {}".format(full_input_path))
instance.context.data["cleanupFullPaths"].append(full_thumbnail_path)

full_thumbnail_path = tempfile.mkstemp(suffix=".jpg")[1]
self.log.info("output {}".format(full_thumbnail_path))
ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")

ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
ffmpeg_args = self.ffmpeg_args or {}

ffmpeg_args = self.ffmpeg_args or {}
jpeg_items = [
path_to_subprocess_arg(ffmpeg_path),
# override file if already exists
"-y"
]

jpeg_items = [
"\"{}\"".format(ffmpeg_path),
# override file if already exists
"-y"
]

# add input filters from peresets
jpeg_items.extend(ffmpeg_args.get("input") or [])
# input file
jpeg_items.append("-i \"{}\"".format(full_input_path))
# add input filters from peresets
jpeg_items.extend(ffmpeg_args.get("input") or [])
# input file
jpeg_items.extend([
"-i", path_to_subprocess_arg(full_input_path),
# extract only single file
jpeg_items.append("-frames:v 1")
"-frames:v", "1",
# Add black background for transparent images
jpeg_items.append((
"-filter_complex"
" \"color=black,format=rgb24[c]"
"-filter_complex", (
"\"color=black,format=rgb24[c]"
";[c][0]scale2ref[c][i]"
";[c][i]overlay=format=auto:shortest=1,setsar=1\""
))
),
])

jpeg_items.extend(ffmpeg_args.get("output") or [])
jpeg_items.extend(ffmpeg_args.get("output") or [])

# output file
jpeg_items.append("\"{}\"".format(full_thumbnail_path))
# output file
jpeg_items.append(path_to_subprocess_arg(full_thumbnail_path))

subprocess_jpeg = " ".join(jpeg_items)
subprocess_jpeg = " ".join(jpeg_items)

# run subprocess
self.log.debug("Executing: {}".format(subprocess_jpeg))
openpype.api.run_subprocess(
subprocess_jpeg, shell=True, logger=self.log
)
# run subprocess
self.log.debug("Executing: {}".format(subprocess_jpeg))
openpype.api.run_subprocess(
subprocess_jpeg, shell=True, logger=self.log
)

# remove thumbnail key from origin repre
thumbnail_repre.pop("thumbnail")
streams = get_ffprobe_streams(full_thumbnail_path)
width = height = None
for stream in streams:

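The refactor in this hunk switches the command assembly from manually quoted strings to path_to_subprocess_arg while still joining everything into one shell command. A hedged, self-contained sketch of that pattern, with shlex.quote standing in for the OpenPype helper and placeholder file paths:

    # Sketch only: shlex.quote stands in for openpype.lib.path_to_subprocess_arg
    # and all paths are placeholders.
    import subprocess
    from shlex import quote

    ffmpeg_path = "/usr/bin/ffmpeg"             # assumed ffmpeg location
    full_input_path = "/tmp/frame.1001.png"     # placeholder input frame
    full_thumbnail_path = "/tmp/thumbnail.jpg"  # placeholder output

    jpeg_items = [
        quote(ffmpeg_path),
        "-y",                                   # overwrite output if it exists
        "-i", quote(full_input_path),
        "-frames:v", "1",                       # keep only a single frame
        quote(full_thumbnail_path),
    ]
    subprocess_jpeg = " ".join(jpeg_items)
    subprocess.run(subprocess_jpeg, shell=True, check=True)
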
@@ -121,8 +109,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
                 height = stream["height"]
                 break

-        filename = os.path.basename(full_thumbnail_path)
-        staging_dir = staging_dir or os.path.dirname(full_thumbnail_path)
+        staging_dir, filename = os.path.split(full_thumbnail_path)

         # create new thumbnail representation
         representation = {

@@ -130,15 +117,11 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
             'ext': 'jpg',
             'files': filename,
             "stagingDir": staging_dir,
-            "tags": ["thumbnail"],
+            "tags": ["thumbnail", "delete"],
         }
         if width and height:
             representation["width"] = width
             representation["height"] = height

-        # # add Delete tag when temp file was rendered
-        if not is_jpeg:
-            representation["tags"].append("delete")
-
         self.log.info(f"New representation {representation}")
         instance.data["representations"].append(representation)

@@ -17,7 +17,7 @@ def otio_range_to_frame_range(otio_range):
     start = _ot.to_frames(
         otio_range.start_time, otio_range.start_time.rate)
     end = start + _ot.to_frames(
-        otio_range.duration, otio_range.duration.rate) - 1
+        otio_range.duration, otio_range.duration.rate)
     return start, end

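Assuming the "- 1" variant is the line being removed, the returned end frame changes from an inclusive last frame to an exclusive end. A small worked example with assumed values (start frame 100, 25-frame duration at 25 fps):

    # Worked example of the end-frame arithmetic; all values are assumed.
    import opentimelineio.opentime as _ot

    start_time = _ot.RationalTime(100, 25)
    duration = _ot.RationalTime(25, 25)

    start = _ot.to_frames(start_time, start_time.rate)
    end_with_minus_one = start + _ot.to_frames(duration, duration.rate) - 1  # 124, inclusive last frame
    end_without = start + _ot.to_frames(duration, duration.rate)             # 125, exclusive end
    print(start, end_with_minus_one, end_without)
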
@@ -254,7 +254,7 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end):
         media_in + source_in + offset_in)
     media_out_trimmed = (
         media_in + source_in + (
-            ((source_range.duration.value - 1) * abs(
+            (source_range.duration.value * abs(
                 time_scalar)) + offset_out))

     # calculate available handles

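A quick arithmetic check of what dropping the "- 1" does to media_out_trimmed, using assumed sample values (media_in 1001, source_in 10, a 50-frame source duration, no retime, no output offset):

    # Hedged arithmetic example; all numbers are assumed sample values.
    media_in = 1001
    source_in = 10
    duration_value = 50   # source_range.duration.value
    time_scalar = 1.0
    offset_out = 0

    out_with_minus_one = media_in + source_in + (
        ((duration_value - 1) * abs(time_scalar)) + offset_out)  # 1060
    out_without = media_in + source_in + (
        (duration_value * abs(time_scalar)) + offset_out)        # 1061
    print(out_with_minus_one, out_without)
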
@@ -921,12 +921,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         if self.enabled:
             for project in self.connection.projects(projection={"name": 1}):
                 project_name = project["name"]
-                project_settings = self.get_sync_project_setting(project_name)
-                if project_settings and project_settings.get("enabled"):
+                if self.is_project_enabled(project_name):
                     enabled_projects.append(project_name)

         return enabled_projects

+    def is_project_enabled(self, project_name):
+        if self.enabled:
+            project_settings = self.get_sync_project_setting(project_name)
+            if project_settings and project_settings.get("enabled"):
+                return True
+        return False
+
     def handle_alternate_site(self, collection, representation, processed_site,
                               file_id, synced_file_id):
         """

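A rough usage sketch for the new helper; the ModulesManager lookup mirrors the pattern used elsewhere in this diff, the import path is assumed to be the usual openpype.modules one, and the project name is a placeholder:

    # Sketch: ask the sync server module about one project (name is assumed).
    from openpype.modules import ModulesManager

    manager = ModulesManager()
    sync_server = manager.modules_by_name["sync_server"]
    if sync_server.is_project_enabled("my_project"):
        print("Site sync is enabled for my_project")
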
@@ -116,10 +116,14 @@ def install_openpype_plugins(project_name=None, host_name=None):
     pyblish.api.register_discovery_filter(filter_pyblish_plugins)
     register_loader_plugin_path(LOAD_PATH)

+    modules_manager = ModulesManager()
+    publish_plugin_dirs = modules_manager.collect_plugin_paths()["publish"]
+    for path in publish_plugin_dirs:
+        pyblish.api.register_plugin_path(path)
+
     if host_name is None:
         host_name = os.environ.get("AVALON_APP")

-    modules_manager = ModulesManager()
     creator_paths = modules_manager.collect_creator_plugin_paths(host_name)
     for creator_path in creator_paths:
         register_creator_plugin_path(creator_path)

@@ -98,4 +98,4 @@
                 }
             }
         }
     }
 }

@@ -87,7 +87,8 @@
                     "camera",
                     "gizmo",
                     "source",
-                    "render"
+                    "render",
+                    "write"
                 ]
             },
             "ValidateInstanceInContext": {

@@ -41,6 +41,9 @@
             },
             {
                 "render": "render"
-            }
+            },
+            {
+                "write": "write"
+            }
         ]
     }

@@ -1,6 +1,7 @@
 import copy
 import re
 import math
+import time
 from uuid import uuid4

 from Qt import QtCore, QtGui

@@ -38,6 +39,14 @@ def is_filtering_recursible():

 class BaseRepresentationModel(object):
     """Methods for SyncServer useful in multiple models"""
+    # Cheap & hackish way how to avoid refreshing of whole sync server module
+    # on each selection change
+    _last_project = None
+    _modules_manager = None
+    _last_project_cache = 0
+    _last_manager_cache = 0
+    _max_project_cache_time = 30
+    _max_manager_cache_time = 60

     def reset_sync_server(self, project_name=None):
         """Sets/Resets sync server vars after every change (refresh.)"""

@@ -47,28 +56,53 @@ class BaseRepresentationModel(object):
         remote_site = remote_provider = None

         if not project_name:
-            project_name = self.dbcon.Session["AVALON_PROJECT"]
+            project_name = self.dbcon.Session.get("AVALON_PROJECT")
         else:
             self.dbcon.Session["AVALON_PROJECT"] = project_name

-        if project_name:
-            manager = ModulesManager()
-            sync_server = manager.modules_by_name["sync_server"]
-            if project_name in sync_server.get_enabled_projects():
-                active_site = sync_server.get_active_site(project_name)
-                active_provider = sync_server.get_provider_for_site(
-                    project_name, active_site)
-                if active_site == 'studio':  # for studio use explicit icon
-                    active_provider = 'studio'
-
-                remote_site = sync_server.get_remote_site(project_name)
-                remote_provider = sync_server.get_provider_for_site(
-                    project_name, remote_site)
-                if remote_site == 'studio':  # for studio use explicit icon
-                    remote_provider = 'studio'
-
-                repre_icons = lib.get_repre_icons()
+        if not project_name:
+            self.repre_icons = repre_icons
+            self.sync_server = sync_server
+            self.active_site = active_site
+            self.active_provider = active_provider
+            self.remote_site = remote_site
+            self.remote_provider = remote_provider
+            return
+
+        now_time = time.time()
+        project_cache_diff = now_time - self._last_project_cache
+        if project_cache_diff > self._max_project_cache_time:
+            self._last_project = None
+
+        if project_name == self._last_project:
+            return
+
+        self._last_project = project_name
+        self._last_project_cache = now_time
+
+        manager_cache_diff = now_time - self._last_manager_cache
+        if manager_cache_diff > self._max_manager_cache_time:
+            self._modules_manager = None
+
+        if self._modules_manager is None:
+            self._modules_manager = ModulesManager()
+            self._last_manager_cache = now_time
+
+        sync_server = self._modules_manager.modules_by_name["sync_server"]
+        if sync_server.is_project_enabled(project_name):
+            active_site = sync_server.get_active_site(project_name)
+            active_provider = sync_server.get_provider_for_site(
+                project_name, active_site)
+            if active_site == 'studio':  # for studio use explicit icon
+                active_provider = 'studio'
+
+            remote_site = sync_server.get_remote_site(project_name)
+            remote_provider = sync_server.get_provider_for_site(
+                project_name, remote_site)
+            if remote_site == 'studio':  # for studio use explicit icon
+                remote_provider = 'studio'
+
+            repre_icons = lib.get_repre_icons()

         self.repre_icons = repre_icons
         self.sync_server = sync_server

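The caching idea in reset_sync_server, expiring a remembered project after a fixed number of seconds, reduces to a small standalone sketch; only the 30-second window matches _max_project_cache_time from the diff, everything else here is illustrative.

    # Minimal time-based cache sketch mirroring the _last_project bookkeeping.
    import time

    class ProjectCache(object):
        _last_project = None
        _last_project_cache = 0
        _max_project_cache_time = 30  # seconds

        def needs_refresh(self, project_name):
            now_time = time.time()
            if now_time - self._last_project_cache > self._max_project_cache_time:
                self._last_project = None  # window expired, force a refresh

            if project_name == self._last_project:
                return False  # same project inside the window, skip the heavy reset

            self._last_project = project_name
            self._last_project_cache = now_time
            return True

    cache = ProjectCache()
    print(cache.needs_refresh("demo"))  # True on first call
    print(cache.needs_refresh("demo"))  # False while cached
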
@@ -202,6 +202,7 @@ def cli_publish(data, publish_paths, gui=True):
     if os.path.exists(json_data_path):
         with open(json_data_path, "r") as f:
             result = json.load(f)
         os.remove(json_data_path)

+    log.info(f"Publish result: {result}")

start.py, 12 lines changed

@@ -386,18 +386,6 @@ def set_modules_environments():
     modules_manager = ModulesManager()

     module_envs = modules_manager.collect_global_environments()
-    publish_plugin_dirs = modules_manager.collect_plugin_paths()["publish"]
-
-    # Set pyblish plugins paths if any module want to register them
-    if publish_plugin_dirs:
-        publish_paths_str = os.environ.get("PYBLISHPLUGINPATH") or ""
-        publish_paths = publish_paths_str.split(os.pathsep)
-        _publish_paths = {
-            os.path.normpath(path) for path in publish_paths if path
-        }
-        for path in publish_plugin_dirs:
-            _publish_paths.add(os.path.normpath(path))
-        module_envs["PYBLISHPLUGINPATH"] = os.pathsep.join(_publish_paths)

     # Merge environments with current environments and update values
     if module_envs:

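The block removed above performed the PYBLISHPLUGINPATH merge by hand; the same os.pathsep logic can be sketched on its own (the module plugin directory in the list below is a made-up example):

    # Sketch of the removed env-var merge; the example plugin dir is assumed.
    import os

    publish_plugin_dirs = ["/opt/openpype/modules/example/plugins/publish"]

    publish_paths_str = os.environ.get("PYBLISHPLUGINPATH") or ""
    _publish_paths = {
        os.path.normpath(path)
        for path in publish_paths_str.split(os.pathsep)
        if path
    }
    for path in publish_plugin_dirs:
        _publish_paths.add(os.path.normpath(path))

    os.environ["PYBLISHPLUGINPATH"] = os.pathsep.join(_publish_paths)
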