Merge branch 'feature/nuke-slate-prerender' into master-testing-local

# Conflicts:
#	pype/plugins/global/publish/collect_filesequences.py
#	pype/plugins/global/publish/integrate_new.py
commit 13f08c7f1f
Author: jakub@orbi.tools
Date:   2020-01-14 19:58:03 +01:00

8 changed files with 77 additions and 39 deletions

pype/plugins/global/publish/collect_filesequences.py View file

@@ -101,6 +101,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
         resolution_height = 1080
         lut_path = None
         slate_frame = None
+        families_data = None
         subset = None
         if os.environ.get("PYPE_PUBLISH_PATHS"):
             paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
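As an aside on the hand-off this collector builds on: the farm job writes JSON metadata and advertises its location through PYPE_PUBLISH_PATHS. The loading loop below is only a sketch; whether each entry is a JSON file or a folder holding one is assumed here, and only the environment variable and the os.pathsep split come from the hunk above.

import glob
import json
import os

# collect every metadata payload advertised by the farm job (sketch only)
loaded = []
for entry in os.environ.get("PYPE_PUBLISH_PATHS", "").split(os.pathsep):
    if not entry:
        continue
    json_files = [entry] if entry.endswith(".json") else glob.glob(
        os.path.join(entry, "*.json"))
    for metadata_path in json_files:
        with open(metadata_path, "r") as f:
            loaded.append(json.load(f))
# per-instance keys such as "subset", "families" and "slateFrame" are
# then read from these payloads, as the hunks below show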
@@ -157,6 +158,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                 lut_path = instance.get("lutPath", None)
                 baked_mov_path = instance.get("bakeRenderPath")
                 subset = instance.get("subset")
+                families_data = instance.get("families")
                 slate_frame = instance.get("slateFrame")
             else:
@@ -197,6 +199,8 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                 families.append("ftrack")
             if "write" in instance_family:
                 families.append("write")
+            if families_data and "slate" in families_data:
+                families.append("slate")
             if data.get("attachTo"):
                 # we need to attach found collections to existing

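Taken together, the collect_filesequences.py changes carry a "slate" family over from the published metadata onto the collected instance, so downstream plugins know an extra slate frame is attached. A minimal standalone sketch, with an invented helper and example data:

def collect_families(instance_data, instance_family):
    """Invented helper mirroring the family logic added above."""
    families = []
    if "write" in instance_family:
        families.append("write")
    families_data = instance_data.get("families")
    # new in this commit: keep the "slate" tag from the source instance
    if families_data and "slate" in families_data:
        families.append("slate")
    return families


# example payload, values invented
print(collect_families({"families": ["render", "slate"]}, "write"))
# -> ['write', 'slate']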
pype/plugins/global/publish/integrate_new.py View file

@@ -357,7 +357,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                     dst_head,
                     dst_start_frame,
                     dst_tail).replace("..", ".")
-                repre['published_path'] = dst
+                repre['published_path'] = self.unc_convert(dst)
             else:
                 # Single file
@@ -386,7 +386,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 instance.data["transfers"].append([src, dst])
-                repre['published_path'] = dst
+                repre['published_path'] = self.unc_convert(dst)
             self.log.debug("__ dst: {}".format(dst))
             representation = {
@@ -460,6 +460,27 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
             self.log.debug("Hardlinking file .. {} -> {}".format(src, dest))
             self.hardlink_file(src, dest)
 
+    def unc_convert(self, path):
+        self.log.debug("_ path .. `{}`".format(path))
+        drive, _path = os.path.splitdrive(path)
+        self.log.debug("_ drive, _path .. `{}`, `{}`".format(drive, _path))
+        unc = Path(drive).resolve()
+        self.log.debug("_ unc.resolved .. `{}`".format(unc))
+        path = str(unc) + _path
+        self.log.debug("_ path.resolved .. `{}`".format(path))
+
+        if not os.path.exists(str(unc)):
+            self.log.info("_ converting to unc from environments ..")
+            path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH")
+            path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")
+            self.log.debug("_ path_replace .. `{}`".format(path_replace))
+            self.log.debug("_ path_mount .. `{}`".format(path_mount))
+            if "/" in path_mount:
+                path = path.replace(path_mount[0:-1], path_replace)
+            else:
+                path = path.replace(path_mount, path_replace)
+        return path
+
     def copy_file(self, src, dst):
         """ Copy given source to destination
@@ -469,11 +490,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         Returns:
             None
         """
-        src = str(Path(src).resolve())
-        drive, _path = os.path.splitdrive(dst)
-        unc = Path(drive).resolve()
-        dst = str(unc) + _path
+        src = self.unc_convert(src)
+        dst = self.unc_convert(dst)
         self.log.debug("Copying file .. {} -> {}".format(src, dst))
         dirname = os.path.dirname(dst)
@@ -495,10 +513,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     def hardlink_file(self, src, dst):
         dirname = os.path.dirname(dst)
-        src = str(Path(src).resolve())
-        drive, _path = os.path.splitdrive(dst)
-        unc = Path(drive).resolve()
-        dst = str(unc) + _path
+        src = self.unc_convert(src)
+        dst = self.unc_convert(dst)
         try:
             os.makedirs(dirname)

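What the new unc_convert() helper is doing, in isolation: resolve the drive letter of a mapped network drive to its UNC share through pathlib, and when that root does not exist fall back to a plain string replacement driven by the studio environment. The sample path and the environment values below are invented; PYPE_STUDIO_PROJECTS_PATH and PYPE_STUDIO_PROJECTS_MOUNT are the variables the diff actually reads.

import os
from pathlib import Path

# example values only
os.environ.setdefault("PYPE_STUDIO_PROJECTS_MOUNT", "P:/")
os.environ.setdefault("PYPE_STUDIO_PROJECTS_PATH", r"\\studio\projects")

dst = "P:/show/shot/publish/render/v001/render_v001.0001.exr"

drive, tail = os.path.splitdrive(dst)
# on Windows this can resolve a mapped drive letter to its UNC share
unc_root = Path(drive).resolve()
if os.path.exists(str(unc_root)):
    dst = str(unc_root) + tail
else:
    # environment-driven fallback, mirroring the replace logic above
    mount = os.environ["PYPE_STUDIO_PROJECTS_MOUNT"]
    mount = mount[0:-1] if "/" in mount else mount  # drop trailing slash
    dst = dst.replace(mount, os.environ["PYPE_STUDIO_PROJECTS_PATH"])

print(dst)

With this helper in place, copy_file() and hardlink_file() route both source and destination through unc_convert() instead of resolving the drive inline, and the published_path stored on the representation gets the same treatment.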
View file

@@ -202,19 +202,27 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         # job so they use the same environment
         environment = job["Props"].get("Env", {})
         environment = dict(
             {key: os.environ[key] for key in self.enviro_filter
              if key in environment}, **api.Session)
+        self.log.debug("___> enviro: {}".format(environment))
+
+        for _key in os.environ:
+            if _key.lower().startswith('pype_'):
+                environment[_key] = os.environ[_key]
 
         i = 0
         for index, key in enumerate(environment):
+            self.log.info("KEY: {}".format(key))
+            self.log.info("FILTER: {}".format(self.enviro_filter))
+
             if key.upper() in self.enviro_filter:
                 payload["JobInfo"].update({
                     "EnvironmentKeyValue%d" % i: "{key}={value}".format(
                         key=key,
                         value=environment[key]
                     )
                 })
                 i += 1
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % i: "{key}={value}".format(
key=key,
value=environment[key]
)
})
i += 1
         # Avoid copied pools and remove secondary pool
         payload["JobInfo"]["Pool"] = "none"