From bff817afd999ddf2536e48340d0bae0ce049b1cf Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 8 Apr 2022 18:59:33 +0200 Subject: [PATCH 001/409] wip on new publisher conversion --- openpype/hosts/houdini/api/lib.py | 18 +++++++++++++ openpype/hosts/houdini/api/plugin.py | 24 +++++++++++++++-- .../hosts/houdini/hooks/set_operators_path.py | 25 ++++++++++++++++++ openpype/hosts/houdini/otls/OpenPype.hda | Bin 0 -> 8238 bytes .../plugins/create/create_pointcache.py | 5 +++- 5 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/houdini/hooks/set_operators_path.py create mode 100644 openpype/hosts/houdini/otls/OpenPype.hda diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..911df31714 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -453,3 +453,21 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def load_creator_code_to_asset( + otl_file_path, node_type_name, source_file_path): + # type: (str, str, str) -> None + # Load the Python source code. + with open(source_file_path, "rb") as src: + source = src.read() + + # Find the asset definition in the otl file. + definitions = [definition + for definition in hou.hda.definitionsInFile(otl_file_path) + if definition.nodeTypeName() == node_type_name] + assert(len(definitions) == 1) + definition = definitions[0] + + # Store the source code into the PythonCook section of the asset. + definition.addSection("PythonCook", source) \ No newline at end of file diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2bbb65aa05..64abfe9ef9 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -2,11 +2,17 @@ """Houdini specific Avalon/Pyblish plugin definitions.""" import sys import six - +from abc import ( + ABCMeta, + abstractmethod, + abstractproperty +) +import six import hou from openpype.pipeline import ( CreatorError, - LegacyCreator + LegacyCreator, + Creator as NewCreator ) from .lib import imprint @@ -84,3 +90,17 @@ class Creator(LegacyCreator): OpenPypeCreatorError, OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator): + _nodes = [] + + def collect_instances(self): + pass + + def update_instances(self, update_list): + pass + + def remove_instances(self, instances): + pass \ No newline at end of file diff --git a/openpype/hosts/houdini/hooks/set_operators_path.py b/openpype/hosts/houdini/hooks/set_operators_path.py new file mode 100644 index 0000000000..6f26baaa78 --- /dev/null +++ b/openpype/hosts/houdini/hooks/set_operators_path.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from openpype.lib import PreLaunchHook +import os + + +class SetOperatorsPath(PreLaunchHook): + """Set path to OpenPype assets folder.""" + + app_groups = ["houdini"] + + def execute(self): + hou_path = self.launch_context.env.get("HOUDINIPATH") + + openpype_assets = os.path.join( + os.getenv("OPENPYPE_REPOS_ROOT"), + "openpype", "hosts", "houdini", "hda" + ) + + if not hou_path: + self.launch_context.env["HOUDINIPATH"] = openpype_assets + return + + self.launch_context.env["HOUDINIPATH"] = "{}{}{}".format( + hou_path, os.pathsep, openpype_assets + ) diff --git a/openpype/hosts/houdini/otls/OpenPype.hda b/openpype/hosts/houdini/otls/OpenPype.hda new file mode 100644 index 
0000000000000000000000000000000000000000..b34418d422b69282353dc134b1c4855e377c1039 GIT binary patch literal 8238 zcmcgx?`{)E5O)fq!ceJHszg;pmjj7bs$&9tK*0%eY=@*xYzsR92_frzx3(9bcTc-} zi38#R`WF4rZ@d7{)fZ@Ib}x?4PMkQ{wiLyloj<>s{W~+;<>H&v$>$1u{cgKlEWK&e zN`?A%r5ulaX;wS`!uKCKBJvq$%N^ehSW~+42&i9>E9SUeX}+hP&U%u%nl?hgxb|GH zLoMIk|6;x+__-ghnQ!maM0DCufw`+w) zc%(am!_VW-H7j!bbM(Ijy#%2d4%fH9cC*ObK(uR~WTCcVw~Mil>8deP5Tct(-7ey2 zJn~~5oU2L^QmGkLl~6Omm1SC5j+w4*(I8Bvev(6iH|jzJYFTw?(6U2Ut^xaJV7a*& zaS!#B-5x~yP9JEuVpZRl`dYf1ET98Zcm7JHmj1@^`^5S{lyQQzgd}6LLflA;o~xPX z2Eh?&Q%)sJu%AwUOcVHUFnWDV$_!bxXAA~zlLptF2@~$5jTZ1YBp=h)9mo9qWT|Z_ zA|xXO{2&bc?)~GATMRpOAeI3!l@zP7rB76jB2a!RcV&)8N}A$Z|@^ zuY`tKJ`FDy#;<`@^z?Kez5=dJ#^?g!4FGOZXy%|sCT=n)8^AduQc3-j5!GM^&pSln zG=Qq?Kxkri$UV;R1S1QXP)Qs8IY;ZG2O2ZmyJcaql~%1PCglxxP@$z?qAlf>(=!1qLNs_jxhAyNP- z$`xG5g3lWzJWb&B0534r7&SI|0hG84_bFf&(sE~To=lU^Hdao64wB1S)Z}z% z`UkUj;dIuh8d81!18f=?C+@aZKY_;f)4wa-)-xJT1KES@GZSpI`GhLbVta>{(sensI#L>jgxJV2%i zWW@;COnf}oJ3XRbfiW((0Z4d|O_j3k+d>^NsSu=UNhfCxG-O_PEE$}9@a!{sXo_?- z8i02oO>8nWQZSqgR$FjQ24yl_ixMgcSjA2X&Kx0j1E!3oDg4LJ;)N~GNYMr)=fP;I za9$)edCeqk;rkCV-!bu-$8&m&v&9E5mrvUU!7F>vn08w%jPtF_Y1;DMfF; zWe_}io`%X(i}j1pX9=l~+NVdAz2H6rUC^3+186b-;s=PkBIJ2;)c+9^Grqq zddfmmu+ecf(T8Fb1oA5kDV%_apFpULL1-X}L(s{1nz%%P4R8hhStgmxI<{VNh}-m8 z)|>}h#eAb!+RX3m)Eo6mWyi4%m3U+)zfl4bgHXhj?LwvOV6b96yOc*}@$_}9@&FEJ zXunt<;KDFMkEKjCuFv(##vi%t2+gX?BCa8Q6Rp5&{7}g5n3+mwtQf!Q`Hh`YBVR5y z%K6>Wz-r8Lu2FdNLu8}%B5N}Z`!25()hcIT9*GTL;+4TOHR|k$?yN9l9!}5A)+(9QE{`T(OdM;}%nKf(Rz-rEp zEa$OqAv7$%N&SLXCzV^UBm)W+NxWCn~DxWr^b_1jAtUekt0V_L%HcEnK0m%mAKJ%z@ yR^PsZqcL^YdQ`(s+F1_$gAOU=NcdwZ33n_h;f*Cta^_fQ#1~6WxME4Cd-6X`t literal 0 HcmV?d00001 diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index feb683edf6..27112260ad 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,7 +1,7 @@ from openpype.hosts.houdini.api import plugin -class CreatePointCache(plugin.Creator): +class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" name = "pointcache" @@ -9,6 +9,9 @@ class CreatePointCache(plugin.Creator): family = "pointcache" icon = "gears" + def create(self, subset_name, instance_data, pre_create_data): + pass + def __init__(self, *args, **kwargs): super(CreatePointCache, self).__init__(*args, **kwargs) From 0ac27ab609a1198255e2fdad846f7be698e0e725 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 12 May 2022 13:19:29 +0200 Subject: [PATCH 002/409] start of integration --- openpype/hosts/3dsmax/__init__.py | 0 openpype/hosts/3dsmax/api/__init__.py | 0 openpype/hosts/3dsmax/plugins/__init__.py | 0 openpype/hosts/3dsmax/startup/startup.ms | 8 ++++ openpype/hosts/3dsmax/startup/startup.py | 2 + openpype/resources/app_icons/3dsmax.png | Bin 0 -> 12804 bytes .../system_settings/applications.json | 29 +++++++++++++ openpype/settings/entities/enum_entity.py | 1 + .../host_settings/schema_3dsmax.json | 39 ++++++++++++++++++ .../system_schema/schema_applications.json | 4 ++ 10 files changed, 83 insertions(+) create mode 100644 openpype/hosts/3dsmax/__init__.py create mode 100644 openpype/hosts/3dsmax/api/__init__.py create mode 100644 openpype/hosts/3dsmax/plugins/__init__.py create mode 100644 openpype/hosts/3dsmax/startup/startup.ms create mode 100644 openpype/hosts/3dsmax/startup/startup.py create mode 100644 openpype/resources/app_icons/3dsmax.png create mode 100644 
openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json diff --git a/openpype/hosts/3dsmax/__init__.py b/openpype/hosts/3dsmax/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/3dsmax/api/__init__.py b/openpype/hosts/3dsmax/api/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/3dsmax/plugins/__init__.py b/openpype/hosts/3dsmax/plugins/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/3dsmax/startup/startup.ms b/openpype/hosts/3dsmax/startup/startup.ms new file mode 100644 index 0000000000..94318afb01 --- /dev/null +++ b/openpype/hosts/3dsmax/startup/startup.ms @@ -0,0 +1,8 @@ +-- OpenPype Init Script +( + local sysPath = dotNetClass "System.IO.Path" + local sysDir = dotNetClass "System.IO.Directory" + local startup = sysPath.Combine (sysPath.GetDirectoryName getSourceFile) "startup.py" + + python.ExecuteFile startup +) \ No newline at end of file diff --git a/openpype/hosts/3dsmax/startup/startup.py b/openpype/hosts/3dsmax/startup/startup.py new file mode 100644 index 0000000000..dd8c08a6b9 --- /dev/null +++ b/openpype/hosts/3dsmax/startup/startup.py @@ -0,0 +1,2 @@ +# -*- coding: utf-8 -*- +print("inside python startup") \ No newline at end of file diff --git a/openpype/resources/app_icons/3dsmax.png b/openpype/resources/app_icons/3dsmax.png new file mode 100644 index 0000000000000000000000000000000000000000..9ebdf6099f6ac279ebaeccdf21a658a34da5e7b6 GIT binary patch literal 12804 zcmcJ0_gj-q&@M#;LPtR9O&|~uL5fK4O*#ol6fhK}S&-hOhAusTs7NnK2mxu*jUX6Q zdJ(0FA{~_8`5xZy`~&B@&iNt9l_$Hiv$M0a_uR8durxPh0`Y(-C@7eWjr6Q3C@7`R zUvw9NCqZE;e!vH%kF}u=1>q~-67c2118p;H3W}OkhGP^B@SXmN5yFRpg89Swi?Zi& zg$o6R+zVqpZQDSHwb%5ye9(7$Ef0*v-@Vz}dRIh+pr^m8B_ynRm9I+#H*QZY!Qrbc^HJ*$UXz7JZ;saBVHp>%jM>f>E zYc|vx+rm0@lHzj}DfTTI_EBxy68>u{`rUH0_XioCuRe@iGHx?f%Pn94E%_jACeV9Hac~(PZ9tUT7sGm`Z zUN=BK(nXa<{NUif(@?QFpM%>J%c-dSas4Xo^znVD#zrXA;rJpnuS096<$z0ZcADkl z+1GepKrAt!+QYH&YcEKd>61V!;_j13DOnXL1FrQ+x`+Kc6Q+rpXshf#OE`V>t%MQ* z89T?RlWP{WG_|)j)YwMZMRCfrR&b(`A%a@77u<&c<6XEXzEvJpIaZS&hdTfLqBjII zyiI`sqxGt@k#Oa1bXdx)T{KeDzbk6NvmTKA@>J2b6B z!T1Z`TI_aanyfIgZ6mC{HtE0Lt;&B$7Gbmo2xc9WSUerh zZw<-y{q2P!(l3>Tz*6o~LT6o`65D2H; z3X9ENAvi;HC$(Vv+_LIzD1^IZUj{fyG?rN#rAT*&`&_+!cBsZqor}x-d*u0OOd=bf zA9}eO<@Cwg%rnT|z6;~=?fA~7Yk=sEBY9Bh+4(~+8rUsGyji1J;C&afRFg)*yEW5&&P61w&WOS0HB7-6moQkE#Y7=SEoRoTYcN%Lk z!t|_#a}RPyT05`S_%K?X>n;t((+M>o+gqG1=m%X0YYW-JRabzQx0)X9lJOyry^!WvH; zAa^QOJ5eC2$bk;`GA7!kKg+r7g`8T#{j{Vyq1;8$ZXC79cSFyEp)$T%-B4o&N;H%f zPX_GhrUkpFf2kJ)^ckz&j3Z$w?{F76yVi#W`Sg-WVB8f;xZ>YNdE$hFU_4eRGxt&t zA*UM*aRlRedIj(E-r**pT+zm#pZd^FZ>N0V_Xkm+p)~2xo-(s3Wv2l%6GkvW6eKfw z^-oES71zR*KeT8l`m6cqM_BBk7A#&X>1Kio&1x_rx%O!k2}H4x@8;UHEY%=CLLy^( zGjlWV3mY=}ph?SLI@?10Ak@ma6_hT(aAC$UEX@OTP~91N-VdLbp7z4&v)26~Wu!}j3wjZcUN zm77ZrZJl7l zqJmaUZ@;P#G>yJkIrR4@R?Rb8vO#nR)KvMB=W$nBJwCaRh^25xnLR$f-SF7%J*;x5Lgw@eu8mY;75woD?W|!JrWU_YxQNX-yLY|E`VP<~5HIo~ zBEyfAFFkzyL}w6_aMKQ6Fe!r&@Wn@lEJ4Qgj2IbZA@G-F`kPr#TDmfK|(rGL8+-RPeIR__`IBC*p+`N{p_E&=wL> zmYm~&MYd+u{5B!q462pl_irMY?W94kpeT<86E}e+*p%zZb;G^5BQ%kN;pc#v1WFcX z=kU93mnWy$4O$y;jQ_hC?7Pt`l}MDX4y#IOc+$3Mymk8_&bU}+*VXj`8bcX4-rcVR zc8zIDOp_;j4yMNOP1;w3M1T(Q4a_&S#=FR-=AK{OlB{4#mT+z-n~z89HE`d#_)2PBXO60(Jst69MdX_?Y*O56B zh^yWgk5)Z@Q6U_Ud%+t6G|j+U8t>;<3dwI56zo1!j^}iTG1^3tq^`VvT{i7%$UUkMenNv#^}k=y^Orb#O+-{=3XqC8lML2unl+ZqiVlg}WshQGZS)X*Ut#=vKRzH~ zJeF{p(jvL~vt~c+!%?YGd7t~go>zi*i+ey+i6W4+5+Vg**lRfCkF4d|xZ~qz;o2Q{ z*abm-l*f|4{(Ql{`N3Fanv-YM2$mErlA3CJ7Q+U$MMRdMUA61bq~mlErv&R)DS%_X 
z4$59`I(jr#n-NQT%J@>=DeIs5hSge*eHqeR3Xu(`M$nha#VLwI(#p=)Y&X-*bO`oi-`spd z#Z|5tuTVhDiy&JWlq8qGzHe=ycUOm=sqi@&9yp;h{x2teS8(Cufa_?&f|a*x?%xj| zvphv}YXdsSZE|ioQ93xdpYpyI<)%y^LrlJN{5c?au8UHn2e&c=hlKGWpky*j(X~}M zx}cw)>GA@aHkgNGx*(ba936>DX_8098gu{4HUAHl&ubfq*`wDvV3#NmpPavcQI5-m zik)Wt6EWtV7l#x+fPh`4flZW_t(Kjde^1=zvcja4-)QX>SH%nH`J4H88I*ONf%cpjTY87TCFSD@==I?wFFC)AVXD4rVuoq!K=HRyK5lao%9*y(@Hxhu^cG z9LGk)oBufR?Na|)5C!MHtlBVn3CL_#)&z7aSETi_z_L9zHn>r

{SJ~!Y9>I3PqWedKP*SW{qUGfR^((-q<{G5uF z=Swwx2i~q_{TRd`=lM#obHE}a@k(CTAJXLVUb&ivPk=~k0{+s4xPOB&oSchk#Y@-N z&Aoc}!thRVt&YgjQ9zTa~O5;S(^--a|1z*l*QC;GD78qa`hLkLR7&#Ck<|OaeJea)>07gz`5RO6% zrn5Sas=aE8I`~(fJYoD`Qo&w&JgUaZZfG-#)!W|dO>6@r|HDr!birZIJHZe&4%pWT z^xmW9Pq*tOYw=nRq|Ioz9)Dc_i((Ta?zqw{`AIj1%*qFR4`S$}9PFqUp~f{InC--> zn9vwee;FL_W9eruu98tiX^f<3!#_hib!&s~BI&jx=oNY+(sB4$rYN2k6%}4NN zElD-7M5_PMyQ}U%iia9hI)7`XVK*>7T_$HGHY2+~1;$nb+?d+ozuskbg0Y zgW1QS+^#fQsL~5Ae&hD{WpMY?ITsm7o6hrSy4x0A`S%)>8D7dy_TIKOr7eC+JdUi%a_sOLi&#Eb!%AaT34hak`qKyR$#6Z#5A3X!n zBon$`kA8vC4b9UJy<~Jyq;~-^*vZB~&FO`!I@=vNSwu-Mwm((C8FZw`W4>d;9A7(; zqmNQdZp?-r$uwR;D0;fM77~|#S#V#zW-E5P%K-2JL$BUGgFCs?H~f$Ruko9~O5{0a zRSuZ0P%Zo9^gm|hv04+rXL(H1w;m zUOEW;nK^6Q_yn?qf0`3?&GIG2But1hE6wTT-8@}fUs6{hH@C1DA~;|dgm3^?v*I`Y zu5C_7)Dpflh><6Eh&2yMiYj6Fm2USx>^jTg$_IFcov?teG$8ME0ixAmEuI36(Kv^o zT*l=1`P`c3rNK+vAgoN^*Y%cqK&_$uCSE_TaHG}uX*G(Ep*V*m?Zhk%+bLU)=mUXFP{>P-qG^ zuCY3&7=4uM^XEi!-55nXOzN9T6L$A{#{hm&X%OyW1!~HN#S_i}JKX;+Ffjkf6Mjj2 z+3(j5h#8Nvg8N0*H9VRJ3!^d2xAHRTSkCg*?>tVQ3CRMeBF8UQIv@yuD;mp%j)+o^ zZnE=-la<5UrOb2$$KnZJ_iDpm%<|zG(#W(z~p&T=~(3~trw1XT5hkyU!5gD5_pT)Xhs9e+d>PY1aV zN--Y8HU2?_OGYxCc}PS3-2uGK_x9)xk@==2ri|aYB-Tf&y1W>7gWH}G*bqW82fE8h z@46SI`Y346Sm}0Ru|&n@L`}wK5G2LqLMe@_9pC7iOCR~To_1Rq*ztfM!o*E*$qoP- z#WG$(ksL6KRj{DI;DidQ=-Wm7bly7r*1^P*!BcH347whd4=_qp?x^uss_WNDa|D$y z37YeFh+4}2vD1pQdQ*yqsf_Z?LqMZn`p95MogcG5a)?2H%R?t}{yteXz2XE3;e|5s zd+=y^zqawzgK`18u8?R=mCoh=?4wyqGi?6(hE$jtTxnkSs!PB#0j1A$Qs8;q1;@B* zl}#g2p}Ua9pr`>=O~?M%Z%%+{DKfO&;ax1{=&iqHgp;LX;5D3>WCzXJlwpT@(+#vx zS82R#6+@dOJ9TZ-90#^&gf$%rKLr49mQNcynyE(S=_7%#sb_gUp2Dx2)kkVwux-q& z=f9g@m8--TRXiip-;XL^Z^p)u$P`96M0UBm5~?uU1ugH{kj>~}hP;Ke7A}%x5RvU= z8us{&OfQy7qI)~fxNA?NgL1u4A{HL;)fC4al*Y5pW)Ib$?G;S2LWAH8w+JfbeN?upRS@TYUb*7}Cl{LsJRMqe$LH z{XE=3a(WaNL+-RJJL3{!BT3{dMhvPOc!#yJy1VRLU6F2N&7Xm*=~xAqcs35bONQ25 zufk_3v1+a+jjRwyU0YUrluOzm6ae4MT1Wz+az2W9Tt&C_r*fz(;eNFyFH^Vi!SQV9 z?fP^&v+}Ix22ieD`_VWOnI2y`Kes-p&&d^2V=Ka^r($$45ayur^QMu86;YA!K6?HT z$fJ5ny4rDvmT;B1s>;#eU(eTX=UvMndhx{4^_bt(n4=pG%hw|>7#c#4me&1C9^U$L zTLfEsy%|J{#@tBD9@tF@XsRiLv7K9u_2a8|=Rx+x6EB{j)H+@evc#bLp2-%FUsr(QWiS^<%jvAYSNs7B2BszFMrig6tGPLiuvC zgK!aW_o<-rbiMQvOv(97oFe$Hn<#x)(Xdbjc7r-Ju%`5Im_(eu^`@m^0{Q+524NwnqcWHU)k=V&0w5 zDt~4KNdy#I(6@18&>|}b*Ga9-%mO`$R-C&Ox1S=-^nBM6MWY7`iG1%(R|Cp>Lu3jZB(y-wnRE%4+gg=rSFj<-O+)B>|5}quU zsfC^kt55iVEP-e>IxCD-oBB?DJsO72d*VWr1549J))C9HTTLHalfj#HJjJ{_xSUjX z|Le%nYN`CsCbqxGtF@%{zDjdkLi|!ob7afS9yIAGhY^FPhpU*39<1VxHr(|0M_$eL zg_vJz|7}c?Ktzw=%>=l{O^Vj`@@&H(zhcxcj)H@r8+F|7lBxGAHFY>Z5`0Y4U?19+ITwyWvf-_TUkP>l|Im`lTKUA1R8i*X+T9)aR)Q^DIV zSc@F6Gy2DS-UOPb-h&Uu1w4wUdorr~NYUkFPl!s8PKd5o*JWmjJ{g5A3koq1ximz`Hd%m<1zpKo>l7S=&}=9%qzaknxIMOetkjp>2*-$MCkG=_Yrve70>9QK0!+JiS$w;CQPJ_c)d=(6*J`sA*T4tGeDgl;@* z^ypZXAAP#{=|5^Yd98xgO4}Nvg|sPK9KjYELCjD;^V}!^&(Let&&BgyPG%(7_w5kQ zSu=`oa-+LVe-=vykFq&N!km!{Dwd1%nfNJEm75Mjk~YO5E9w+hN0Na!9W!6w`d5R{ zb9S%0EOc+G{Nm~a%DNqjYqrt&F`+b{1QL;1-MB0$&t%ftgQ?VhNy<36jqEODMVH%3dxl3oDe!gP>hIuFLE z92%U*Xx)D2@!F%+n1Wi9gQ9{`MQzz{)=(tijf#jMUqT4QeQmpQziS3Sl#`9dF(1V6 zfVc*AkaXk#!a*3cl+ysduURJr#1H==Z*Qa-2T=S2PELwVL#&t>8%AKh<@AtXr6E%( zQW9#FEC0>N(D+#y->+F`F#Ng^H{%WHBpI?(!~5t zeQk@pe3$I98vB3Rq~$!AXokb@HOec#KFY#)dsjv@C)FAFO-GH-q=K?dJ9xrqWnj<% z`3yxlf{kTS&cwW|q*P#hbt6}u1ICFh4V_q;C<~1*i+Z$q|3YS2%@FmF?&@appe7y8 z6ZwVvy|Uk>4~tT?U_6BoBxtqcS(``4w)Lod03o5s(x}1R(UDqGR-|gs`Rd6fD}GRgqg(niQp(Y26>33@F!!JCSv@c9ml<-1!Z}bl-`_D3M z+zfU}tH*^F5x$FWnD3|BpzcV;N>`<*St}A|nj4 z*7G)U?vs&Dt&JkUvQcTH&|RM11ASkwlM`8nJ`jwDX;SLEe9@fTSL``yK@ci8@1|b> 
zw`L_IbIh2eb_QT!xk93d&_-P_XB@8jzQ+VO$22(pO<{gG{JMLMKlCmb@5ZEzFOvgW(m1kbFWIj-^TZxO&{24yv2FJD~0+aX+65{IR<0_yc44=6*3`jF1SF zr7tDaPRyl=_@5T0)D7iu*VcmEu_g9x=?Fj{HdO12%!f&o#6unXW{MT*1? zH}B5p-E$$g?h0#u-;%xVWNeI#Cp7gJ02~3+T>Z@_Rd^0nyFt;LaKJWSuO>9R%!>P{Db8gPE2bYD7Qxs;K8*}euhB@D)Y)c)7@T88-> zg}a!7doyk$aS#Js6rmT|`0C}|%L=J75I2+6^Yv5mv?FMv#JZ;2u;;BKb<{j!A9TMA zuQ0qgbLwyK-;WoHq#JI!BxPr-Ex#@kty8-v@d|?*iQregV*lxNPVQJmQU#Tv)nFOY8M8e`E&+M+sXM3* zU|%TzUtAaZQB>!LlbQ+(@cVr>AMkBOE%e&wcbJ9lLf?M%_v6(r-y1r|d19G`N47?S zm`{AttLfFo4TFw$C#o@5>Cp2Ro3!6&^oN<-?{-~+zSyd?s8sSo&8uFX_3@cMi~3q4 zo$a|d^Ug-oV*UWt8MLPAUok8YboK8c=JS4+1eK$w`d=PLphk6Tqo#+ddSziEHT-V< zSRnK9jPF5Sfsc$`*gV9exfK@rSbJQETL<+{Tl1aqd%HK`4x_i)cfZffUB*t*gj_p) z{_=QbGrT^fC;Y^7R9smszvt`+>iw7S&bB@5%-P!GX--c;$Ix|^(Q6BOVH%;bfrK{A z)0%+N<(@`|Ky9Y9YL*P9XV>EM+P98oLLBW+I(fAbA~j^sG@#7=qZ739w&S=ofPqFZ z{P<{Qw2(!uz@)u`>P+LesGwDN+8NF-baO%Tu@T&8Fx$v?r(=3&nP0a3xRyt(nqdhx zu;Vc(vcTAJ_H5^e`u7MG@VqnwmbmkqKPC@ z&2f_Bs#PkfpD^g(f5(nzSY~!P`DQ6Pj%;2%wz0B}@Q%0(to%QArZ$4l?wq*k!PDNK zeQUcDw!sqqm%z2YpH||>N_&CbcM1u_&<=nA+=vbQdPCzZ=){M(phL(M3^qDh6FmFd zF0v0ldj5F(X6kQ7zyYjcb}$QX?9N0M3La6j6%bF*3;A28jfaErM4^aF8VANZEK{AL!n5JyGo*O+BZ2g@(nqZ`RUD z2HFfp`^VMK=FbgHUeG9aEUyYbfep1GdrBQopRW31N`P`Bpf*}}JXVkDULl?EXErtYs=_T{@fzZqmJ z!qYh}Yo-<~TvgEqJW~eM#`mOneg8d+uL9-t!P?`HH&tO>oLrzG!_e*G<vLU}i)34wBovx*{k&O6xztsH-ar z;fI-f=_>6q?130Px_AwN%J@gmHit}?t0}wxG?8?=Hb2USeQGh;_%&srUTUEESE~T- zeQaa>(OrbyNMi92)wvgxYxLG(<)cBupOmxT5^bqbsfNEFBp*6z8rXG518x^gn%rA` zJ8k$Uf-mJqqea_kwBRbVdSSlM39 z?k5Re?B%6fd9NI0^NuTfn_Bs^ZioKW z-VpUU)-?V;KzER1=kQNsIIN3}%R?vn_eL4gO0YOKH_B$Rt}@5=gtooGf?J= zcTCotfMD<+ZYUR`+x@K&_x~E_#J;E!(y@tgaR2#`{h{qxO8)WG#)zWh33kt_Uo?ei zxuEik!SOF);Ap?Hwe0<70Cchyv8Mo4Jr$9ZvTimQ(un}CfDRB{DyLW3-v~uL@P*z3 zODk81N%P@OS2{*MbnK%6%WH@UZd8HwiZ7kW(9@fiqn`RaJ~-WLgKTSPh*sNG#|k+H zY(CKV6{fVW#g8APYl%|q64_lpA1)3iluy`UkUgP$z8%O1CrPkSw%6(Q$W4r$fw zID}r?2nLndCEi`xXDXjy{CTbD~`Y8aO=%z+R=K_)Xt-ths^QUKM3Sz_AZhs&*v_ISA7#~(q__5 zkT2R4glk;Pc;_hcqCm8kxZZweYP0Hb(1SgV0jG#lY2NzT73I+v%v%?Ii~BtBp#<=3 zh}Q4DFP9zu5|Bsxj%@3#7U4(+)Ai_1&y4@vSsgX?oWB31@_4e;n6UaKciTIKLW#sx zGHtzxI6i}depbZ~xgW_gVy{T0^FamEf0^dhSpyHvX#ur-PcvfwG@4)xqWW*bJDqR) zJy2~so{n1@!&_QIv0y6Q-6oaj6fj9=M8SWiz4M}N$ZWlnqJ{J`+LWBEog&#VV?%|} zl=5e1Av??JA-0~ky=VOjVX=9(Bf9Q%ZaFEPy&a+$fdF2^9U=u#5kmb zCoE|aP2O7x9_40p>un`nM57laJ{JXfvlvlt2^AYrSJStgj+or6AqtPBoteF^e=m@= z%~OBfykAo2e`WRS>jZ<6OY}eJn+Rrc{JkV1gDVQryn<-ExhIrEvRYOKHVFCSf$0!^u4m%*wmPO9x*K7xo_jj zEKBtFZ{Za{f928~Yq`cIQlHf=gYB=NP}}Q@uU{*+e9ydTDHu&$9?igO$yVQ>h^X+G zIN4jF^Cj!DROYO3oC$Y?&r?17FTR0Eq;b*LHNfpxz8JK@R}WD?R18hR=V$wGQ016vtDg84%TON=togFmI7ffcv#$L0TR4SJh$|L;y);LMS) z1-z};VZ7xzVLfnJ@_b-OVNvY(sJHC6+TnXzzbgD#Jc4q}ReaqPJrE})hUA~M9) ztpHIyYcmHaTs8xW6c;Gi?um{fXvj(bD15DdBed^?xpIks>VZwrK%%2`3LG#-=mSVTvCk4 zU!WNVmpJ}d5B~i#XJV+&^Nl!{bF7DE&DP@?w|#Nw%u?d${9EPkUF~rSB~noMYx|iQue7^h{!hlc0QNlE&N1rq^271^7KK9+!T!_U-6~33L&Vdnm-(UA;b+?8l!5$< z4|Ib)>?(!M@c_*qs-ipmgIP35)7r2K2AbE-@0k*N_1^qa6`akh&h(^xP5b0p+3D(<0KScwn)oZux2&o&FTa^E6zI%%?qgt?+QByfI zY`~AgK`@_Te5->wgL?C*fXGM1#yZq)Lb}!y#d~crR8OtRe?TSMW8pA?Ju>E7mE6tPv$$dP4xhIEK4l;Z#HF9vXw-|F;9CFRlXk_D@dkuaN~-|FO!yB^&0)-6>{ l$`H2yo-zHX_gp)p4J|sLUs3g50RFi_VXSYiN6@(!^*@6=RyY6v literal 0 HcmV?d00001 diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 0fb99a2608..aaecef3494 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1232,6 +1232,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": 
"{}/app_icons/3dsmax.png", + "host_name": "3dsmax", + "environment": { + + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "djvview": { "enabled": true, "label": "DJV View", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afba..b6004a3feb 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -154,6 +154,7 @@ class HostsEnumEntity(BaseEnumEntity): """ schema_types = ["hosts-enum"] all_host_names = [ + "3dsmax", "aftereffects", "blender", "celaction", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json new file mode 100644 index 0000000000..f7c57298af --- /dev/null +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json @@ -0,0 +1,39 @@ +{ + "type": "dict", + "key": "3dsmax", + "label": "Autodesk 3ds Max", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "schema_template", + "name": "template_host_unchangables" + }, + { + "key": "environment", + "label": "Environment", + "type": "raw-json" + }, + { + "type": "dict-modifiable", + "key": "variants", + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } + } + ] +} diff --git a/openpype/settings/entities/schemas/system_schema/schema_applications.json b/openpype/settings/entities/schemas/system_schema/schema_applications.json index 20be33320d..36c5811496 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_applications.json +++ b/openpype/settings/entities/schemas/system_schema/schema_applications.json @@ -9,6 +9,10 @@ "type": "schema", "name": "schema_maya" }, + { + "type": "schema", + "name": "schema_3dsmax" + }, { "type": "schema", "name": "schema_flame" From 6067b1effcca66198836b3519c1a2f9b6cd73872 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 16:02:57 +0200 Subject: [PATCH 003/409] :minus: delete avalon-core submodule --- repos/avalon-core | 1 - 1 file changed, 1 deletion(-) delete mode 160000 repos/avalon-core diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 2fa14cea6f..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From f2a1a11bec47855f1409b6620c618fa3bd89c550 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:41:57 +0200 Subject: [PATCH 004/409] :lipstick: add new publisher menu item --- .../hosts/houdini/startup/MainMenuCommon.xml | 10 ++--- openpype/tools/utils/host_tools.py | 37 +++++++++++++++++++ 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml index abfa3f136e..c08114b71b 100644 --- a/openpype/hosts/houdini/startup/MainMenuCommon.xml +++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml @@ -1,10 +1,10 @@ - + - + - + - 
+ Date: Tue, 30 Aug 2022 18:42:44 +0200 Subject: [PATCH 005/409] :fire: remove workio workio integrated into host addon --- openpype/hosts/houdini/api/workio.py | 57 ---------------------------- 1 file changed, 57 deletions(-) delete mode 100644 openpype/hosts/houdini/api/workio.py diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py deleted file mode 100644 index 5f7efff333..0000000000 --- a/openpype/hosts/houdini/api/workio.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Host API required Work Files tool""" -import os - -import hou - - -def file_extensions(): - return [".hip", ".hiplc", ".hipnc"] - - -def has_unsaved_changes(): - return hou.hipFile.hasUnsavedChanges() - - -def save_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.save(file_name=filepath, - save_to_recent_files=True) - - return filepath - - -def open_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.load(filepath, - suppress_save_prompt=True, - ignore_load_warnings=False) - - return filepath - - -def current_file(): - - current_filepath = hou.hipFile.path() - if (os.path.basename(current_filepath) == "untitled.hip" and - not os.path.exists(current_filepath)): - # By default a new scene in houdini is saved in the current - # working directory as "untitled.hip" so we need to capture - # that and consider it 'not saved' when it's in that state. - return None - - return current_filepath - - -def work_root(session): - work_dir = session["AVALON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - return os.path.join(work_dir, scene_dir) - else: - return work_dir From 2f6a6cfc9a2676d3361e4fc11e0e182de2a4057d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:44:15 +0200 Subject: [PATCH 006/409] :alien: implement creator methods --- openpype/hosts/houdini/api/plugin.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 64abfe9ef9..fc36284a72 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -3,17 +3,17 @@ import sys import six from abc import ( - ABCMeta, - abstractmethod, - abstractproperty + ABCMeta ) import six import hou from openpype.pipeline import ( CreatorError, LegacyCreator, - Creator as NewCreator + Creator as NewCreator, + CreatedInstance ) +from openpype.hosts.houdini.api import list_instances, remove_instance from .lib import imprint @@ -97,10 +97,17 @@ class HoudiniCreator(NewCreator): _nodes = [] def collect_instances(self): - pass + for instance_data in list_instances(): + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) def update_instances(self, update_list): - pass + for created_inst, _changes in update_list: + imprint(created_inst.get("instance_id"), created_inst.data_to_store()) def remove_instances(self, instances): - pass \ No newline at end of file + for instance in instances: + remove_instance(instance) + self._remove_instance_from_context(instance) From 20e25e111bdd41b31415142d3f3fd74460ebbaaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:44:48 +0200 Subject: [PATCH 007/409] :alien: change houdini to host addon --- openpype/hosts/houdini/api/__init__.py | 32 +--- openpype/hosts/houdini/api/lib.py | 52 ++++-- 
openpype/hosts/houdini/api/pipeline.py | 167 +++++++++++------- .../houdini/startup/python2.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.9libs/pythonrc.py | 6 +- 6 files changed, 158 insertions(+), 111 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index fddf7ab98d..f29df021e1 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,24 +1,15 @@ from .pipeline import ( - install, - uninstall, - + HoudiniHost, ls, containerise, + list_instances, + remove_instance ) from .plugin import ( Creator, ) -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root -) - from .lib import ( lsattr, lsattrs, @@ -29,22 +20,15 @@ from .lib import ( __all__ = [ - "install", - "uninstall", + "HoudiniHost", "ls", "containerise", + "list_instances", + "remove_instance", "Creator", - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - # Utility functions "lsattr", "lsattrs", @@ -52,7 +36,3 @@ __all__ = [ "maintained_selection" ] - -# Backwards API compatibility -open = open_file -save = save_file diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ab33fdc3f6..675f3afcb5 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,6 +1,9 @@ +# -*- coding: utf-8 -*- +import sys import uuid import logging from contextlib import contextmanager +import json import six @@ -8,9 +11,11 @@ from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.context_tools import get_current_project_asset - import hou + +self = sys.modules[__name__] +self._parent = None log = logging.getLogger(__name__) @@ -29,23 +34,18 @@ def set_id(node, unique_id, overwrite=False): def get_id(node): - """ - Get the `cbId` attribute of the given node + """Get the `cbId` attribute of the given node. + Args: node (hou.Node): the name of the node to retrieve the attribute from Returns: - str + str: cbId attribute of the node. """ - if node is None: - return - - id = node.parm("id") - if node is None: - return - return id + if node is not None: + return node.parm("id") def generate_ids(nodes, asset_id=None): @@ -325,6 +325,11 @@ def imprint(node, data): label=key, num_components=1, default_value=(value,)) + elif isinstance(value, dict): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(json.dumps(value),)) else: raise TypeError("Unsupported type: %r" % type(value)) @@ -397,8 +402,20 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects - return {parameter.name(): parameter.eval() for - parameter in node.spareParms()} + data = {} + for parameter in node.spareParms(): + value = parameter.eval() + # test if value is json encoded dict + if isinstance(value, six.string_types) and \ + len(value) > 0 and value[0] == "{": + try: + value = json.loads(value) + except json.JSONDecodeError: + # not a json + pass + data[parameter.name()] = value + + return data @contextmanager @@ -477,4 +494,11 @@ def load_creator_code_to_asset( definition = definitions[0] # Store the source code into the PythonCook section of the asset. 
- definition.addSection("PythonCook", source) \ No newline at end of file + definition.addSection("PythonCook", source) + + +def get_main_window(): + """Acquire Houdini's main window""" + if self._parent is None: + self._parent = hou.ui.mainQtWindow() + return self._parent diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 2ae8a4dbf7..b8479a7b25 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -3,7 +3,10 @@ import sys import logging import contextlib -import hou +import hou # noqa + +from openpype.host import HostBase, IWorkfileHost, ILoadHost +from openpype.tools.utils import host_tools import pyblish.api @@ -35,70 +38,96 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -self = sys.modules[__name__] -self._has_been_setup = False -self._parent = None -self._events = dict() +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): + name = "houdini" + def __init__(self): + super(HoudiniHost, self).__init__() + self._op_events = {} + self._has_been_setup = False -def install(): - _register_callbacks() + def install(self): + pyblish.api.register_host("houdini") + pyblish.api.register_host("hython") + pyblish.api.register_host("hpython") - pyblish.api.register_host("houdini") - pyblish.api.register_host("hython") - pyblish.api.register_host("hpython") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) + log.info("Installing callbacks ... ") + # register_event_callback("init", on_init) + self._register_callbacks() + register_event_callback("before.save", before_save) + register_event_callback("save", on_save) + register_event_callback("open", on_open) + register_event_callback("new", on_new) - log.info("Installing callbacks ... ") - # register_event_callback("init", on_init) - register_event_callback("before.save", before_save) - register_event_callback("save", on_save) - register_event_callback("open", on_open) - register_event_callback("new", on_new) + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled + ) - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) + self._has_been_setup = True + # add houdini vendor packages + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") - self._has_been_setup = True - # add houdini vendor packages - hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") + sys.path.append(hou_pythonpath) - sys.path.append(hou_pythonpath) + # Set asset settings for the empty scene directly after launch of Houdini + # so it initializes into the correct scene FPS, Frame Range, etc. + # todo: make sure this doesn't trigger when opening with last workfile + _set_context_settings() - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile - _set_context_settings() + def has_unsaved_changes(self): + return hou.hipFile.hasUnsavedChanges() + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] -def uninstall(): - """Uninstall Houdini-specific functionality of avalon-core. 
+ def save_workfile(self, dst_path=None): + # Force forwards slashes to avoid segfault + filepath = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=filepath, + save_to_recent_files=True) + return filepath - This function is called automatically on calling `api.uninstall()`. - """ + def open_workfile(self, filepath): + # Force forwards slashes to avoid segfault + filepath = filepath.replace("\\", "/") - pyblish.api.deregister_host("hython") - pyblish.api.deregister_host("hpython") - pyblish.api.deregister_host("houdini") + hou.hipFile.load(filepath, + suppress_save_prompt=True, + ignore_load_warnings=False) + return filepath -def _register_callbacks(): - for event in self._events.copy().values(): - if event is None: - continue + def get_current_workfile(self): + current_filepath = hou.hipFile.path() + if (os.path.basename(current_filepath) == "untitled.hip" and + not os.path.exists(current_filepath)): + # By default a new scene in houdini is saved in the current + # working directory as "untitled.hip" so we need to capture + # that and consider it 'not saved' when it's in that state. + return None - try: - hou.hipFile.removeEventCallback(event) - except RuntimeError as e: - log.info(e) + return current_filepath - self._events[on_file_event_callback] = hou.hipFile.addEventCallback( - on_file_event_callback - ) + def get_containers(self): + return ls() + + def _register_callbacks(self): + for event in self._op_events.copy().values(): + if event is None: + continue + + try: + hou.hipFile.removeEventCallback(event) + except RuntimeError as e: + log.info(e) + + self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback( + on_file_event_callback + ) def on_file_event_callback(event): @@ -112,22 +141,6 @@ def on_file_event_callback(event): emit_event("new") -def get_main_window(): - """Acquire Houdini's main window""" - if self._parent is None: - self._parent = hou.ui.mainQtWindow() - return self._parent - - -def teardown(): - """Remove integration""" - if not self._has_been_setup: - return - - self._has_been_setup = False - print("pyblish: Integration torn down successfully") - - def containerise(name, namespace, nodes, @@ -250,7 +263,7 @@ def on_open(): log.warning("Scene has outdated content.") # Get main window - parent = get_main_window() + parent = lib.get_main_window() if parent is None: log.info("Skipping outdated content pop-up " "because Houdini window can't be found.") @@ -370,3 +383,27 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): instance_node.bypass(not new_value) except hou.PermissionError as exc: log.warning("%s - %s", instance_node.path(), exc) + + +def list_instances(): + """List all publish instances in the scene.""" + return lib.lsattr("id", "pyblish.avalon.instance") + + +def remove_instance(instance): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer instance, + because it might contain valuable data for artist. 
+ + """ + nodes = instance[:] + if not nodes: + return + + # Assume instance node is first node + instance_node = nodes[0] + for parameter in instance_node.spareParms(): + if parameter.name() == "id" and \ + parameter.eval() == "pyblish.avalon.instance": + instance_node.removeSpareParmTuple(parameter) diff --git a/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py b/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() diff --git a/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() diff --git a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() From 8ce7d45dd9ff120c959e302636134ca29c8a7bb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:46:00 +0200 Subject: [PATCH 008/409] :construction: change to new creator style --- .../houdini/plugins/create/create_pointcache.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 27112260ad..052580b56f 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,14 +1,23 @@ +# -*- coding: utf-8 -*- from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import list_instances +from openpype.pipeline import CreatedInstance class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - - name = "pointcache" + identifier = "pointcache" label = "Point Cache" family = "pointcache" icon = "gears" + def collect_instances(self): + for instance_data in list_instances(): + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + def create(self, subset_name, instance_data, pre_create_data): pass From 1ca386c78d48cb3903499dd1d7adc5d1ac333a69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 1 Sep 2022 18:46:53 +0200 Subject: [PATCH 
009/409] :bug: add required key variant --- openpype/pipeline/create/context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index eaaed39357..1b2521e4f7 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -435,6 +435,7 @@ class CreatedInstance: if key in data: data.pop(key) + self._data["variant"] = self._data.get("variant") or "" # Stored creator specific attribute values # {key: value} creator_values = copy.deepcopy(orig_creator_attributes) From d2233bc6f8c5c2541ad04c66cafa5e3419c2fbae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 1 Sep 2022 18:47:58 +0200 Subject: [PATCH 010/409] :wrench: new style creator --- openpype/hosts/houdini/api/lib.py | 97 ++++++++++++------- openpype/hosts/houdini/api/pipeline.py | 35 +++++-- openpype/hosts/houdini/api/plugin.py | 61 ++++++++++-- .../plugins/create/create_pointcache.py | 55 ++++------- 4 files changed, 164 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 675f3afcb5..5d99d7f363 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -281,7 +281,7 @@ def render_rop(ropnode): raise RuntimeError("Render failed: {0}".format(exc)) -def imprint(node, data): +def imprint(node, data, update=False): """Store attributes with value on a node Depending on the type of attribute it creates the correct parameter @@ -293,51 +293,50 @@ def imprint(node, data): Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value + update (bool, optional): flag if imprint should update + already existing data or leave them untouched and only + add new. 
Returns: None """ + if not data: + return + + current_parameters = node.spareParms() + current_keys = [p.name() for p in current_parameters] + update_keys = [] parm_group = node.parmTemplateGroup() - parm_folder = hou.FolderParmTemplate("folder", "Extra") + templates = [] for key, value in data.items(): if value is None: continue - if isinstance(value, float): - parm = hou.FloatParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, bool): - parm = hou.ToggleParmTemplate(name=key, - label=key, - default_value=value) - elif isinstance(value, int): - parm = hou.IntParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, six.string_types): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, dict): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(json.dumps(value),)) - else: - raise TypeError("Unsupported type: %r" % type(value)) - - parm_folder.addParmTemplate(parm) - + if key in current_keys: + if not update: + print(f"{key} already exists on {node}") + else: + print(f"replacing {key}") + update_keys.append((key, value)) + continue + parm = parm_to_template(key, value) + # parm.hide(True) + templates.append(parm) + parm_folder.setParmTemplates(templates) parm_group.append(parm_folder) node.setParmTemplateGroup(parm_group) + if update_keys: + parms = node.parmTuplesInFolder(("Extra",)) + for parm in parms: + for key, value in update_keys: + if parm.name() == key: + node.replaceSpareParmTuple( + parm.name(), parm_to_template(key, value)) + def lsattr(attr, value=None, root="/"): """Return nodes that have `attr` @@ -407,9 +406,9 @@ def read(node): value = parameter.eval() # test if value is json encoded dict if isinstance(value, six.string_types) and \ - len(value) > 0 and value[0] == "{": + len(value) > 0 and value.startswith("JSON:::"): try: - value = json.loads(value) + value = json.loads(value.lstrip("JSON:::")) except json.JSONDecodeError: # not a json pass @@ -502,3 +501,35 @@ def get_main_window(): if self._parent is None: self._parent = hou.ui.mainQtWindow() return self._parent + + +def parm_to_template(key, value): + if isinstance(value, float): + parm = hou.FloatParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, bool): + parm = hou.ToggleParmTemplate(name=key, + label=key, + default_value=value) + elif isinstance(value, int): + parm = hou.IntParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, six.string_types): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, (dict, list, tuple)): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=( + "JSON:::" + json.dumps(value),)) + else: + raise TypeError("Unsupported type: %r" % type(value)) + + return parm \ No newline at end of file diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b8479a7b25..6daf942cf0 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -5,8 +5,7 @@ import contextlib import hou # noqa -from openpype.host import HostBase, IWorkfileHost, ILoadHost -from openpype.tools.utils import host_tools +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api @@ -38,7 
+37,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): name = "houdini" def __init__(self): @@ -129,6 +128,16 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): on_file_event_callback ) + def update_context_data(self, data, changes): + root_node = hou.node("/") + lib.imprint(root_node, data) + + def get_context_data(self): + from pprint import pformat + + self.log.debug(f"----" + pformat(lib.read(hou.node("/")))) + return lib.read(hou.node("/")) + def on_file_event_callback(event): if event == hou.hipFileEventType.AfterLoad: @@ -385,9 +394,15 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): log.warning("%s - %s", instance_node.path(), exc) -def list_instances(): - """List all publish instances in the scene.""" - return lib.lsattr("id", "pyblish.avalon.instance") +def list_instances(creator_id=None): + """List all publish instances in the scene. + + """ + instance_signature = { + "id": "pyblish.avalon.instance", + "identifier": creator_id + } + return lib.lsattrs(instance_signature) def remove_instance(instance): @@ -397,13 +412,15 @@ def remove_instance(instance): because it might contain valuable data for artist. """ - nodes = instance[:] + nodes = instance.get("members") if not nodes: return # Assume instance node is first node - instance_node = nodes[0] + instance_node = hou.node(nodes[0]) + to_delete = None for parameter in instance_node.spareParms(): if parameter.name() == "id" and \ parameter.eval() == "pyblish.avalon.instance": - instance_node.removeSpareParmTuple(parameter) + to_delete = parameter + instance_node.removeSpareParmTuple(to_delete) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index fc36284a72..7120a49e41 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -13,8 +13,9 @@ from openpype.pipeline import ( Creator as NewCreator, CreatedInstance ) +from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint +from .lib import imprint, read class OpenPypeCreatorError(CreatorError): @@ -96,18 +97,64 @@ class Creator(LegacyCreator): class HoudiniCreator(NewCreator): _nodes = [] - def collect_instances(self): - for instance_data in list_instances(): - instance = CreatedInstance.from_existing( - instance_data, self - ) + def create(self, subset_name, instance_data, pre_create_data): + try: + if pre_create_data.get("use_selection"): + self._nodes = hou.selectedNodes() + + # Get the node type and remove it from the data, not needed + node_type = instance_data.pop("node_type", None) + if node_type is None: + node_type = "geometry" + + # Get out node + out = hou.node("/out") + instance_node = out.createNode( + node_type, node_name=subset_name) + instance_node.moveToGoodPosition() + instance_data["members"] = [instance_node.path()] + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) self._add_instance_to_context(instance) + imprint(instance_node, instance.data_to_store()) + return instance + + except hou.Error as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def collect_instances(self): + for instance in list_instances(creator_id=self.identifier): + created_instance = CreatedInstance.from_existing( + 
read(instance), self + ) + self._add_instance_to_context(created_instance) def update_instances(self, update_list): for created_inst, _changes in update_list: - imprint(created_inst.get("instance_id"), created_inst.data_to_store()) + instance_node = hou.node(created_inst.get("members")[0]) + current_data = read(instance_node) + + imprint( + instance_node, + { + key: value[1] for key, value in _changes.items() + if current_data.get(key) != value[1] + }, + update=True + ) def remove_instances(self, instances): for instance in instances: remove_instance(instance) self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 052580b56f..686dbaa7ab 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api import list_instances from openpype.pipeline import CreatedInstance +import hou + class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" @@ -11,50 +12,34 @@ class CreatePointCache(plugin.HoudiniCreator): family = "pointcache" icon = "gears" - def collect_instances(self): - for instance_data in list_instances(): - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name, instance_data, pre_create_data): - pass + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - def __init__(self, *args, **kwargs): - super(CreatePointCache, self).__init__(*args, **kwargs) + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) - - self.data.update({"node_type": "alembic"}) - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("members")[0]) parms = { - "use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output + "use_sop_path": True, + "build_from_path": True, + "path_attrib": "path", "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "facesets": 0, # No face sets (by default exclude them) - "filename": "$HIP/pyblish/%s.abc" % self.name, + "format": 2, + "facesets": 0, + "filename": "$HIP/pyblish/{}.abc".format(self.identifier) } - if self.nodes: - node = self.nodes[0] - parms.update({"sop_path": node.path()}) + if instance_node: + parms["sop_path"] = instance_node.path() - instance.setParms(parms) - instance.parm("trange").set(1) + instance_node.setParms(parms) + instance_node.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) From e189b21e543bf0480d0dba31dd18c2b2107104c6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:55:05 +0200 Subject: [PATCH 011/409] :bug: set AttributeValues as new style class --- openpype/pipeline/create/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 1b2521e4f7..2962f43443 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -85,7 +85,7 @@ class InstanceMember: }) -class AttributeValues: +class AttributeValues(object): """Container which keep values of Attribute definitions. Goal is to have one object which hold values of attribute definitions for From 13dd125e2677bda06f5afe21971a4e9893b01b5a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:55:37 +0200 Subject: [PATCH 012/409] :rotating_light: remove debug prints --- openpype/hosts/houdini/api/pipeline.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 6daf942cf0..92761b7b4e 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" import os import sys import logging @@ -72,9 +74,11 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): sys.path.append(hou_pythonpath) - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile + # Set asset settings for the empty scene directly after launch of + # Houdini so it initializes into the correct scene FPS, + # Frame Range, etc. + # TODO: make sure this doesn't trigger when + # opening with last workfile. 
_set_context_settings() def has_unsaved_changes(self): @@ -133,9 +137,6 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): lib.imprint(root_node, data) def get_context_data(self): - from pprint import pformat - - self.log.debug(f"----" + pformat(lib.read(hou.node("/")))) return lib.read(hou.node("/")) From f09cd22e7ce6b8546f8a74f7b847edc2bf63eef5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:56:06 +0200 Subject: [PATCH 013/409] :recycle: remove unused import --- openpype/hosts/houdini/api/plugin.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 7120a49e41..ff747085da 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" import sys -import six from abc import ( ABCMeta ) From c0263462663f2d099a1db47850152fe7b6ee1791 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:56:40 +0200 Subject: [PATCH 014/409] :bug: set output name to subset name --- openpype/hosts/houdini/plugins/create/create_pointcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 686dbaa7ab..3365e25091 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -29,7 +29,7 @@ class CreatePointCache(plugin.HoudiniCreator): "prim_to_detail_pattern": "cbId", "format": 2, "facesets": 0, - "filename": "$HIP/pyblish/{}.abc".format(self.identifier) + "filename": "$HIP/pyblish/{}.abc".format(subset_name) } if instance_node: From 27d131f0eea1dfb74b750a0a6a1cc622d152b2ca Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:57:16 +0200 Subject: [PATCH 015/409] :recycle: optimize imprint function --- openpype/hosts/houdini/api/lib.py | 85 +++++++++++++++---------------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 5d99d7f363..f438944b09 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -17,7 +17,7 @@ import hou self = sys.modules[__name__] self._parent = None log = logging.getLogger(__name__) - +JSON_PREFIX = "JSON:::" def get_asset_fps(): """Return current asset fps.""" @@ -290,6 +290,11 @@ def imprint(node, data, update=False): http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html + Because of some update glitch where you cannot overwrite existing + ParmTemplates on node using: + `setParmTemplates()` and `parmTuplesInFolder()` + update is done in another pass. 
+ Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value @@ -304,38 +309,48 @@ def imprint(node, data, update=False): if not data: return - current_parameters = node.spareParms() - current_keys = [p.name() for p in current_parameters] - update_keys = [] - - parm_group = node.parmTemplateGroup() - parm_folder = hou.FolderParmTemplate("folder", "Extra") + current_parms = {p.name(): p for p in node.spareParms()} + update_parms = [] templates = [] + for key, value in data.items(): if value is None: continue - if key in current_keys: + parm = get_template_from_value(key, value) + + if key in current_parms.keys(): if not update: - print(f"{key} already exists on {node}") + log.debug("{} already exists on {}".format(key, node)) else: - print(f"replacing {key}") - update_keys.append((key, value)) + log.debug("replacing {}".format(key)) + update_parms.append(parm) continue - parm = parm_to_template(key, value) # parm.hide(True) templates.append(parm) - parm_folder.setParmTemplates(templates) - parm_group.append(parm_folder) + + parm_group = node.parmTemplateGroup() + parm_folder = parm_group.findFolder("Extra") + + # if folder doesn't exist yet, create one and append to it, + # else append to existing one + if not parm_folder: + parm_folder = hou.FolderParmTemplate("folder", "Extra") + parm_folder.setParmTemplates(templates) + parm_group.append(parm_folder) + else: + for template in templates: + parm_group.appendToFolder(parm_folder, template) + node.setParmTemplateGroup(parm_group) - if update_keys: - parms = node.parmTuplesInFolder(("Extra",)) - for parm in parms: - for key, value in update_keys: - if parm.name() == key: - node.replaceSpareParmTuple( - parm.name(), parm_to_template(key, value)) + # TODO: Updating is done here, by calling probably deprecated functions. + # This needs to be addressed in the future. + if not update_parms: + return + + for parm in update_parms: + node.replaceSpareParmTuple(parm.name(), parm) def lsattr(attr, value=None, root="/"): @@ -406,9 +421,9 @@ def read(node): value = parameter.eval() # test if value is json encoded dict if isinstance(value, six.string_types) and \ - len(value) > 0 and value.startswith("JSON:::"): + value.startswith(JSON_PREFIX): try: - value = json.loads(value.lstrip("JSON:::")) + value = json.loads(value[len(JSON_PREFIX):]) except json.JSONDecodeError: # not a json pass @@ -478,24 +493,6 @@ def reset_framerange(): hou.setFrame(frame_start) -def load_creator_code_to_asset( - otl_file_path, node_type_name, source_file_path): - # type: (str, str, str) -> None - # Load the Python source code. - with open(source_file_path, "rb") as src: - source = src.read() - - # Find the asset definition in the otl file. - definitions = [definition - for definition in hou.hda.definitionsInFile(otl_file_path) - if definition.nodeTypeName() == node_type_name] - assert(len(definitions) == 1) - definition = definitions[0] - - # Store the source code into the PythonCook section of the asset. 
- definition.addSection("PythonCook", source) - - def get_main_window(): """Acquire Houdini's main window""" if self._parent is None: @@ -503,7 +500,7 @@ def get_main_window(): return self._parent -def parm_to_template(key, value): +def get_template_from_value(key, value): if isinstance(value, float): parm = hou.FloatParmTemplate(name=key, label=key, @@ -528,8 +525,8 @@ def parm_to_template(key, value): label=key, num_components=1, default_value=( - "JSON:::" + json.dumps(value),)) + JSON_PREFIX + json.dumps(value),)) else: raise TypeError("Unsupported type: %r" % type(value)) - return parm \ No newline at end of file + return parm From fe1a1055c27072a73d45172389b603b69d19d296 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 5 Sep 2022 18:03:38 +0200 Subject: [PATCH 016/409] :bug: store context on dedicated node instead of root node root node doesn't allow storing of spare parameters --- openpype/hosts/houdini/api/pipeline.py | 32 +++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 92761b7b4e..4ff6873ced 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -30,6 +30,7 @@ from .lib import get_asset_fps log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" +CONTEXT_CONTAINER = "/obj/OpenPypeContext" IS_HEADLESS = not hasattr(hou, "ui") PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") @@ -132,12 +133,37 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): on_file_event_callback ) + @staticmethod + def _create_context_node(): + """Helper for creating context holding node. + + Returns: + hou.Node: context node + + """ + obj_network = hou.node("/obj") + op_ctx = obj_network.createNode( + "null", node_name="OpenPypeContext") + op_ctx.moveToGoodPosition() + op_ctx.setBuiltExplicitly(False) + op_ctx.setCreatorState("OpenPype") + op_ctx.setComment("OpenPype node to hold context metadata") + op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) + op_ctx.hide(True) + return op_ctx + def update_context_data(self, data, changes): - root_node = hou.node("/") - lib.imprint(root_node, data) + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self._create_context_node() + + lib.imprint(op_ctx, data) def get_context_data(self): - return lib.read(hou.node("/")) + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self._create_context_node() + return lib.read(op_ctx) def on_file_event_callback(event): From 1a7a52f44cb5dbc07b1fc53c9592c79d6da5156e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 6 Sep 2022 16:40:09 +0200 Subject: [PATCH 017/409] :recycle: members as nodes, change access to members --- .../hosts/houdini/plugins/publish/collect_active_state.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_frames.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_instances.py | 6 ++++++ .../hosts/houdini/plugins/publish/collect_output_node.py | 2 +- .../hosts/houdini/plugins/publish/collect_redshift_rop.py | 2 +- .../houdini/plugins/publish/collect_render_products.py | 2 +- .../hosts/houdini/plugins/publish/collect_usd_layers.py | 4 ++-- openpype/hosts/houdini/plugins/publish/extract_alembic.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_ass.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_composite.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_hda.py | 2 +- 
.../hosts/houdini/plugins/publish/extract_redshift_proxy.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_usd.py | 2 +- .../hosts/houdini/plugins/publish/extract_usd_layered.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py | 2 +- .../plugins/publish/validate_abc_primitive_to_detail.py | 2 +- .../houdini/plugins/publish/validate_alembic_face_sets.py | 2 +- .../houdini/plugins/publish/validate_animation_settings.py | 2 +- openpype/hosts/houdini/plugins/publish/validate_bypass.py | 2 +- .../hosts/houdini/plugins/publish/validate_camera_rop.py | 2 +- .../houdini/plugins/publish/validate_cop_output_node.py | 2 +- .../houdini/plugins/publish/validate_file_extension.py | 2 +- .../hosts/houdini/plugins/publish/validate_frame_token.py | 2 +- .../hosts/houdini/plugins/publish/validate_no_errors.py | 2 +- .../plugins/publish/validate_primitive_hierarchy_paths.py | 2 +- .../houdini/plugins/publish/validate_sop_output_node.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 +- .../houdini/plugins/publish/validate_usd_model_and_shade.py | 2 +- .../houdini/plugins/publish/validate_usd_output_node.py | 2 +- .../hosts/houdini/plugins/publish/validate_usd_setdress.py | 2 +- .../houdini/plugins/publish/validate_usd_shade_workspace.py | 2 +- .../houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 32 files changed, 38 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index 862d5720e1..dd83721358 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -24,7 +24,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance[0] + node = instance.data["members"][0] if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9bd43d8a09..cad894cc3f 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -24,7 +24,7 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index d38927984a..0187a1f1d8 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -47,6 +47,11 @@ class CollectInstances(pyblish.api.ContextPlugin): if node.evalParm("id") != "pyblish.avalon.instance": continue + # instance was created by new creator code, skip it as + # it is already collected. 
+ if node.parm("creator_identifier"): + continue + has_family = node.evalParm("family") assert has_family, "'%s' is missing 'family'" % node.name() @@ -78,6 +83,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] + instance.data["members"] = [node] instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index 0130c0a8da..a3989dc776 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = instance.data["members"][0] # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 72b554b567..33bf74610a 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index d7163b43c0..e88c5ea0e6 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance[0].path() + rop_path = instance.data["members"][0].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index e3985e3c97..c0a55722a5 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -19,7 +19,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance[0] + rop_node = instance.data["members"][0] save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -54,7 +54,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance[0]) # include same USD ROP + layer_inst.append(instance.data["members"][0]) # include same USD ROP layer_inst.append((layer, save_path)) # include layer data # Allow this subset to be grouped into a USD Layer on creation diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 83b790407f..7f1e98c0af 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -14,7 +14,7 @@ class ExtractAlembic(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index e56e40df85..03ca899c5b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -14,7 +14,7 @@ class ExtractAss(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index f300b6d28d..eb77a91d62 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -15,7 +15,7 @@ class ExtractComposite(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 301dd4e297..4352939a2c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -16,7 +16,7 @@ class ExtractHDA(openpype.api.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance[0] + hda_node = instance.data["members"][0] hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index c754d60c59..b440b1d2ee 100644 --- 
a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -14,7 +14,7 @@ class ExtractRedshiftProxy(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 0fc26900fb..9fa68178f4 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -16,7 +16,7 @@ class ExtractUSD(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 80919c023b..6214e65655 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(openpype.api.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance[0] + node = instance.data["members"][0] # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 113e1b0bcb..a30854333e 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -14,7 +14,7 @@ class ExtractVDBCache(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 3e17d3e8de..b97978d927 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -33,7 +33,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): output = instance.data["output_node"] - rop = instance[0] + rop = instance.data["members"][0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index e9126ffef0..ee59eed35e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -24,7 +24,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 5eb8f93d03..32c5078b9f 100644 --- 
a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -36,7 +36,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = instance.data["members"][0] # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index fc4e18f701..6a37009549 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -34,6 +34,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = instance[0] + rop = instance.data["members"][0] if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index a0919e1323..4433f5712b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -14,7 +14,7 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = instance.data["members"][0] if node.parm("use_sop_path").eval(): raise RuntimeError( "Alembic ROP for Camera export should not be " diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 543539ffe3..86ddc2adf2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -33,7 +33,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index b26d28a1e7..f050a41b88 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -37,7 +37,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): def get_invalid(cls, instance): # Get ROP node from instance - node = instance[0] + node = instance.data["members"][0] # Create lookup for current family in instance families = [] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index 76b5910576..b65e9ef62e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -36,7 +36,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = instance.data["members"][0] # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index f58e5f8d7d..46210bda61 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -37,7 +37,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance[0]) + validate_nodes.append(instance.data["members"][0]) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 1eb36763bb..a0e580fbf0 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -30,7 +30,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): output = instance.data["output_node"] - rop = instance[0] + rop = instance.data["members"][0] build_from_path = rop.parm("build_from_path").eval() if not build_from_path: cls.log.debug( diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a5a07b1b1a..a2a9c1f4ea 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -35,7 +35,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index ac0181aed2..95cad82085 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -24,7 +24,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 2fd2f5eb9f..bdb7c05319 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -37,7 +37,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 1f10fafdf4..0c38ccd4be 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -33,7 +33,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index fb1094e6b5..835cd5977a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -21,7 +21,7 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): from pxr import UsdGeom - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index a77ca2f3cb..c5218c203d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -19,7 +19,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 1ba840b71d..ac87fa8fed 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -36,7 +36,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." 
% instance[0].path() + "ROP node '%s'." % instance.data["members"][0].path() ) return [instance] From 44518d2d85dcabe808c19b2f24ca64f21d096d90 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Sep 2022 01:55:15 +0200 Subject: [PATCH 018/409] :sparkles: add collector for member nodes --- .../publish/collect_members_as_nodes.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py new file mode 100644 index 0000000000..07d71c6605 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +import pyblish.api +import hou + + +class CollectMembersAsNodes(pyblish.api.InstancePlugin): + """Collects instance members as Houdini nodes.""" + + order = pyblish.api.CollectorOrder - 0.01 + hosts = ["houdini"] + label = "Collect Members as Nodes" + + def process(self, instance): + if not instance.data.get("creator_identifier"): + return + + nodes = [ + hou.node(member) for member in instance.data.get("members", []) + ] + + instance.data["members"] = nodes From 31c0e9050b84b015f104ba7d08275563b75dbbc6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Sep 2022 01:55:37 +0200 Subject: [PATCH 019/409] :rotating_light: fix hound :dog: --- .../hosts/houdini/plugins/publish/collect_usd_layers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index c0a55722a5..c21b336403 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -54,8 +54,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance.data["members"][0]) # include same USD ROP - layer_inst.append((layer, save_path)) # include layer data + # include same USD ROP + layer_inst.append(instance.data["members"][0]) + # include layer data + layer_inst.append((layer, save_path)) # Allow this subset to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" From 26954b9377639b12fdbf3f67e36b0edf86582018 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:08:19 +0200 Subject: [PATCH 020/409] :recycle: fix name typo and refactor validator error --- .../publish/help/validate_vdb_input_node.xml | 21 +++++++++ .../plugins/publish/valiate_vdb_input_node.py | 47 ------------------- .../publish/validate_vdb_input_node.py | 13 +++-- 3 files changed, 30 insertions(+), 51 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml delete mode 100644 openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. 
+ + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py deleted file mode 100644 index ac408bc842..0000000000 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ /dev/null @@ -1,47 +0,0 @@ -import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - - -class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB. - - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices - - A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - """ - - order = ValidateContentsOrder + 0.1 - families = ["vdbcache"] - hosts = ["houdini"] - label = "Validate Input Node (VDB)" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" - ) - - @classmethod - def get_invalid(cls, instance): - - node = instance.data["output_node"] - - prims = node.geometry().prims() - nr_of_prims = len(prims) - - nr_of_points = len(node.geometry().points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] - - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index ac408bc842..1f9ccc9c42 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import ( + PublishValidationError +) class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" @@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" 
+ raise PublishValidationError( + self, + "Node connected to the output node is not of type VDB", + title=self.label ) @classmethod From 59c13789e6924a700e269c30bec2d62327acbf09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:08:44 +0200 Subject: [PATCH 021/409] :rotating_light: fix hound --- openpype/hosts/houdini/plugins/publish/collect_instances.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 0187a1f1d8..0582ee154c 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -63,7 +63,8 @@ class CollectInstances(pyblish.api.ContextPlugin): data.update({"active": not node.isBypassed()}) # temporarily translation of `active` to `publish` till issue has - # been resolved, https://github.com/pyblish/pyblish-base/issues/307 + # been resolved. + # https://github.com/pyblish/pyblish-base/issues/307 if "active" in data: data["publish"] = data["active"] From 3b25a68552c6ec1c41f9351bdfcd5bde6626310f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:09:09 +0200 Subject: [PATCH 022/409] :recycle: work on validation errors --- .../publish/help/validate_sop_output_node.xml | 21 +++++++++++++++++++ .../publish/validate_sop_output_node.py | 9 +++++--- 2 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. + + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a2a9c1f4ea..02b650d48e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishXmlValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -22,9 +24,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishXmlValidationError( + self, + message="Output node(s) `%s` are incorrect. 
" % invalid, + title=self.label ) @classmethod From 008479022108e013110c22c1eb95e2e026fb2938 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:14:03 +0200 Subject: [PATCH 023/409] :pencil2: fix typo in import --- openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index cf8d61cda3..81274c670e 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from openyppe.client import get_subset_by_name, get_asset_by_name +from openpype.client import get_subset_by_name, get_asset_by_name from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib From 9e1fb2bc6c979b8a31cf3630af2b5ea76e58a337 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:54:10 +0200 Subject: [PATCH 024/409] :fire: delete validation error help file --- .../publish/help/validate_sop_output_node.xml | 21 ------------------- 1 file changed, 21 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml deleted file mode 100644 index 0f92560bf7..0000000000 --- a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - -Scene setting - -## Invalid input node - -VDB input must have the same number of VDBs, points, primitives and vertices as output. - - - -### __Detailed Info__ (optional) - -A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - - \ No newline at end of file From 831050799d6a1b1f0b1a51bcbc16f62fbd39f96c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:54:46 +0200 Subject: [PATCH 025/409] :bug: pass argument in deprecated function --- openpype/host/interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index cbf12b0d13..03c731d0e4 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -252,7 +252,7 @@ class IWorkfileHost: Remove when all usages are replaced. """ - self.save_workfile() + self.save_workfile(dst_path) def open_file(self, filepath): """Deprecated variant of 'open_workfile'. From e1a504ff3a831f5bd3ee5dd36914239613cb7b7c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:55:16 +0200 Subject: [PATCH 026/409] :recycle: refactor to new function calls --- openpype/hosts/houdini/plugins/publish/save_scene.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py index 6128c7af77..d6e07ccab0 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -14,13 +14,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): # Filename must not have changed since collecting host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() assert context.data['currentFile'] == current_file, ( "Collected filename from current scene name." 
)
 
         if host.has_unsaved_changes():
-            self.log.info("Saving current file..")
-            host.save_file(current_file)
+            self.log.info("Saving current file {}...".format(current_file))
+            host.save_workfile(current_file)
         else:
             self.log.debug("No unsaved changes, skipping file save..")

From 3501d0d23a78fbaef106da2fffe946cb49bef855 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 9 Sep 2022 10:36:43 +0200
Subject: [PATCH 027/409] :wastebasket: move deprecation marks from comments to
 docstrings

---
 openpype/action.py | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/openpype/action.py b/openpype/action.py
index de9cdee010..15c96404b6 100644
--- a/openpype/action.py
+++ b/openpype/action.py
@@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context):
     return get_errored_plugins_from_context(context)
 
 
-# 'RepairAction' and 'RepairContextAction' were moved to
-# 'openpype.pipeline.publish' please change you imports.
-# There is no "reasonable" way hot mark these classes as deprecated to show
-# warning of wrong import.
-# Deprecated since 3.14.* will be removed in 3.16.*
 class RepairAction(pyblish.api.Action):
     """Repairs the action
 
     To process the repairing this requires a static `repair(instance)`
     method is available on the plugin.
 
+    Deprecated:
+        'RepairAction' and 'RepairContextAction' were moved to
+        'openpype.pipeline.publish' please change your imports.
+        There is no "reasonable" way to mark these classes as deprecated
+        to show warning of wrong import. Deprecated since 3.14.* will be
+        removed in 3.16.*
+
     """
     label = "Repair"
     on = "failed"  # This action is only available on a failed plug-in
@@ -103,13 +105,19 @@ class RepairAction(pyblish.api.Action):
         plugin.repair(instance)
 
 
-# Deprecated since 3.14.* will be removed in 3.16.*
 class RepairContextAction(pyblish.api.Action):
     """Repairs the action
 
     To process the repairing this requires a static `repair(instance)`
     method is available on the plugin.
 
+    Deprecated:
+        'RepairAction' and 'RepairContextAction' were moved to
+        'openpype.pipeline.publish' please change your imports.
+        There is no "reasonable" way to mark these classes as deprecated
+        to show warning of wrong import.
Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in From d59e188ab003d56d6ce8a71947f973b4a732ea01 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:37:27 +0200 Subject: [PATCH 028/409] :recycle: add instance_node as separate parameter --- openpype/hosts/houdini/api/plugin.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ff747085da..f300496a43 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -111,7 +111,12 @@ class HoudiniCreator(NewCreator): instance_node = out.createNode( node_type, node_name=subset_name) instance_node.moveToGoodPosition() + + # wondering if we'll ever need more than one member here + # in Houdini instance_data["members"] = [instance_node.path()] + instance_data["instance_node"] = instance_node.path() + instance = CreatedInstance( self.family, subset_name, @@ -136,7 +141,7 @@ class HoudiniCreator(NewCreator): def update_instances(self, update_list): for created_inst, _changes in update_list: - instance_node = hou.node(created_inst.get("members")[0]) + instance_node = hou.node(created_inst.get("instance_node")) current_data = read(instance_node) imprint( From 42c6c846e479c344b6021101a5aa5d744372447a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:38:05 +0200 Subject: [PATCH 029/409] :alien: change error handling --- .../validate_abc_primitive_to_detail.py | 31 +++++++----- .../publish/validate_alembic_input_node.py | 27 +++++++---- .../plugins/publish/validate_camera_rop.py | 47 +++++++++++++------ .../validate_primitive_hierarchy_paths.py | 26 ++++++---- .../publish/validate_sop_output_node.py | 11 ++--- .../publish/validate_workfile_paths.py | 19 ++++++-- 6 files changed, 109 insertions(+), 52 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 40949b7042..55c705c65b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from collections import defaultdict - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" @@ -24,15 +24,24 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitives found with inconsistent primitive " - "to detail attributes. See log." + raise PublishValidationError( + ("Primitives found with inconsistent primitive " + "to detail attributes. See log."), + title=self.label ) @classmethod def get_invalid(cls, instance): - output = instance.data["output_node"] + output_node = instance.data.get("output_node") + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] rop = instance.data["members"][0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() @@ -67,7 +76,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the start frame then it might be # something that is emitted over time. As such we can't actually @@ -86,7 +95,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -94,7 +103,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = None for attr in pattern.split(" "): @@ -130,4 +139,4 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Path has multiple values: %s (path: %s)" % (list(values), path) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 2625ae5f83..aa572dc3bb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,5 @@ import pyblish.api - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -12,7 +11,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" @@ -20,18 +19,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitive types found that are not supported" - "for Alembic output." + raise PublishValidationError( + ("Primitive types found that are not supported" + "for Alembic output."), + title=self.label ) @classmethod def get_invalid(cls, instance): invalid_prim_types = ["VDB", "Volume"] - node = instance.data["output_node"] + output_node = instance.data.get("output_node") - if not hasattr(node, "geometry"): + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] + + if not hasattr(output_node, "geometry"): # In the case someone has explicitly set an Object # node instead of a SOP node in Geometry context # then for now we ignore - this allows us to also @@ -40,7 +49,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): return frame = instance.data.get("frameStart", 0) - geo = node.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) invalid = False for prim_type in invalid_prim_types: diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index f97c46ae9d..18fed7fbc4 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- +"""Validator plugin for Houdini Camera ROP settings.""" import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" @@ -14,30 +16,45 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance.data["members"][0] + node = hou.node(instance.data.get("instance_node")) if node.parm("use_sop_path").eval(): - raise RuntimeError( - "Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable." + raise PublishValidationError( + ("Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable."), + title=self.label ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() objects = node.parm("objects").eval() - assert root, "Root parameter must be set on Alembic ROP" - assert root.startswith("/"), "Root parameter must start with slash /" - assert objects, "Objects parameter must be set on Alembic ROP" - assert len(objects.split(" ")) == 1, "Must have only a single object." 
+ errors = [] + if not root: + errors.append("Root parameter must be set on Alembic ROP") + if not root.startswith("/"): + errors.append("Root parameter must start with slash /") + if not objects: + errors.append("Objects parameter must be set on Alembic ROP") + if len(objects.split(" ")) != 1: + errors.append("Must have only a single object.") + + if errors: + for error in errors: + self.log.error(error) + raise PublishValidationError( + "Some checks failed, see validator log.", + title=self.label) # Check if the object exists and is a camera path = root + "/" + objects camera = hou.node(path) if not camera: - raise ValueError("Camera path does not exist: %s" % path) + raise PublishValidationError( + "Camera path does not exist: %s" % path, + title=self.label) if camera.type().name() != "cam": - raise ValueError( - "Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name()) - ) + raise PublishValidationError( + ("Object set in Alembic ROP is not a camera: " + "{} (type: {})").format(camera, camera.type().name()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 10100b698e..e1f1dc116e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -19,16 +21,24 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "See log for details. " "Invalid nodes: {0}".format(invalid) + raise PublishValidationError( + "See log for details. " "Invalid nodes: {0}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): - import hou + output_node = instance.data.get("output_node") - output = instance.data["output_node"] + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) + + return [node.path()] rop = instance.data["members"][0] build_from_path = rop.parm("build_from_path").eval() @@ -52,7 +62,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the current frame then we can't # check whether the path names are correct. 
So we'll just issue a @@ -73,7 +83,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -81,7 +91,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = geo.primStringAttribValues(path_attr) # Ensure all primitives are set to a valid path @@ -93,4 +103,4 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Prims have no value for attribute `%s` " "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index 02b650d48e..c18ad7a1b7 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import PublishValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -24,10 +24,9 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise PublishXmlValidationError( - self, - message="Output node(s) `%s` are incorrect. " % invalid, - title=self.label + raise PublishValidationError( + "Output node(s) are incorrect", + title="Invalid output node(s)" ) @classmethod @@ -35,7 +34,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + output_node = instance.data.get("output_node") if output_node is None: node = instance.data["members"][0] diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 79b3e894e5..f7a4c762cc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -2,22 +2,30 @@ import openpype.api import pyblish.api import hou +from openpype.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from openpype.pipeline.publish import RepairAction -class ValidateWorkfilePaths(pyblish.api.InstancePlugin): +class ValidateWorkfilePaths( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate workfile paths so they are absolute.""" order = pyblish.api.ValidatorOrder families = ["workfile"] hosts = ["houdini"] label = "Validate Workfile Paths" - actions = [openpype.api.RepairAction] + actions = [RepairAction] optional = True node_types = ["file", "alembic"] prohibited_vars = ["$HIP", "$JOB"] def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid() self.log.info( "node types to check: {}".format(", ".join(self.node_types))) @@ -29,13 +37,18 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): self.log.error( "{}: {}".format(param.path(), param.unexpandedString())) - raise RuntimeError("Invalid paths found") + raise PublishValidationError( + "Invalid paths found", title=self.label) @classmethod def get_invalid(cls): invalid = [] for param, _ in 
hou.fileReferences(): + # it might return None for some reason + if not param: + continue # skip nodes we are not interested in + cls.log.debug(param) if param.node().type().name() not in cls.node_types: continue From a1377a87d6001acb91429022b14a1db12e3f57a0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:39:17 +0200 Subject: [PATCH 030/409] :construction: dealing with identifiers --- .../plugins/create/create_alembic_camera.py | 42 +++++++++---------- .../plugins/create/create_pointcache.py | 13 +++--- 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index eef86005f5..294c99744b 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,46 +1,44 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateAlembicCamera(plugin.Creator): +class CreateAlembicCamera(plugin.HoudiniCreator): """Single baked camera from Alembic ROP""" - name = "camera" + identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" family = "camera" icon = "camera" - def __init__(self, *args, **kwargs): - super(CreateAlembicCamera, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - # Set node type to create for output - self.data.update({"node_type": "alembic"}) + instance = super(CreateAlembicCamera, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/%s.abc" % self.name, + "filename": "$HIP/pyblish/{}.abc".format(subset_name), "use_sop_path": False, } - if self.nodes: - node = self.nodes[0] - path = node.path() + if self._nodes: + path = self._nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) parms.update({"root": "/" + root, "objects": remainder}) - instance.setParms(parms) + instance_node.setParms(parms) # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
- instance.parm("use_sop_path").lock(True) - instance.parm("trange").set(1) + instance_node.parm("use_sop_path").lock(True) + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 3365e25091..889e27ba51 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,18 +1,19 @@ # -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance -import hou - class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - identifier = "pointcache" + identifier = "io.openpype.creators.houdini.pointcache" label = "Point Cache" family = "pointcache" icon = "gears" def create(self, subset_name, instance_data, pre_create_data): + import hou + instance_data.pop("active", None) instance_data.update({"node_type": "alembic"}) @@ -21,7 +22,7 @@ class CreatePointCache(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = hou.node(instance.get("members")[0]) + instance_node = hou.node(instance.get("instance_node")) parms = { "use_sop_path": True, "build_from_path": True, @@ -32,8 +33,8 @@ class CreatePointCache(plugin.HoudiniCreator): "filename": "$HIP/pyblish/{}.abc".format(subset_name) } - if instance_node: - parms["sop_path"] = instance_node.path() + if self._nodes: + parms["sop_path"] = self._nodes[0].path() instance_node.setParms(parms) instance_node.parm("trange").set(1) From dade064eb3f50b6b70aedec4e6d0cd487f7a9a70 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:39:30 +0200 Subject: [PATCH 031/409] :construction: solving hda publishing --- .../houdini/plugins/create/create_hda.py | 53 +++++++------------ .../houdini/plugins/publish/extract_hda.py | 2 +- 2 files changed, 21 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b98da8b8bb..b1751d0b6c 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,28 +1,22 @@ # -*- coding: utf-8 -*- -import hou - +"""Creator plugin for creating publishable Houdini Digital Assets.""" from openpype.client import ( get_asset_by_name, get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import lib -from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import (lib, plugin) -class CreateHDA(plugin.Creator): +class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - name = "hda" + identifier = "hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" maintain_selection = False - def __init__(self, *args, **kwargs): - super(CreateHDA, self).__init__(*args, **kwargs) - self.data.pop("active", None) - def _check_existing(self, subset_name): # type: (str) -> bool """Check if existing subset name versions already exists.""" @@ -40,28 +34,34 @@ class CreateHDA(plugin.Creator): } return subset_name.lower() in existing_subset_names_low - def _process(self, instance): - subset_name = self.data["subset"] - # get selected nodes - out = hou.node("/obj") - self.nodes = hou.selectedNodes() + def create(self, subset_name, instance_data, pre_create_data): + import hou - if (self.options or {}).get("useSelection") and 
self.nodes: - # if we have `use selection` enabled and we have some + instance_data.pop("active", None) + + instance = super(CreateHDA, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + instance_node = hou.node(instance.get("instance_node")) + out = hou.node("/obj") + if self._nodes: + # if we have `use selection` enabled, and we have some # selected nodes ... subnet = out.collapseIntoSubnet( self.nodes, - subnet_name="{}_subnet".format(self.name)) + subnet_name="{}_subnet".format(subset_name)) subnet.moveToGoodPosition() to_hda = subnet else: to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(self.name)) + "subnet", node_name="{}_subnet".format(subset_name)) if not to_hda.type().definition(): # if node type has not its definition, it is not user # created hda. We test if hda can be created from the node. if not to_hda.canCreateDigitalAsset(): - raise Exception( + raise plugin.OpenPypeCreatorError( "cannot create hda from node {}".format(to_hda)) hda_node = to_hda.createDigitalAsset( @@ -78,17 +78,4 @@ class CreateHDA(plugin.Creator): hda_node.setName(subset_name) - # delete node created by Avalon in /out - # this needs to be addressed in future Houdini workflow refactor. - - hou.node("/out/{}".format(subset_name)).destroy() - - try: - lib.imprint(hda_node, self.data) - except hou.OperationFailed: - raise plugin.OpenPypeCreatorError( - ("Cannot set metadata on asset. Might be that it already is " - "OpenPype asset.") - ) - return hda_node diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 4352939a2c..50a7ce2908 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -16,7 +16,7 @@ class ExtractHDA(openpype.api.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance.data["members"][0] + hda_node = instance.data.get("members")[0] hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) From 01c60e6fa777029ce50864d5cae843e24f797fb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 9 Sep 2022 18:40:02 +0200 Subject: [PATCH 032/409] :recycle: rename selected node, instance node creation n method --- openpype/hosts/houdini/api/plugin.py | 32 ++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index f300496a43..8180676ce8 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -94,23 +94,41 @@ class Creator(LegacyCreator): @six.add_metaclass(ABCMeta) class HoudiniCreator(NewCreator): - _nodes = [] + selected_nodes = [] + + def _create_instance_node( + self, node_name, parent, + node_type="geometry"): + # type: (str, str, str) -> hou.Node + """Create node representing instance. + + Arguments: + node_name (str): Name of the new node. + parent (str): Name of the parent node. + node_type (str, optional): Type of the node. + + Returns: + hou.Node: Newly created instance node. 
+
+        """
+        parent_node = hou.node(parent)
+        instance_node = parent_node.createNode(
+            node_type, node_name=node_name)
+        instance_node.moveToGoodPosition()
+        return instance_node
 
     def create(self, subset_name, instance_data, pre_create_data):
         try:
             if pre_create_data.get("use_selection"):
-                self._nodes = hou.selectedNodes()
+                self.selected_nodes = hou.selectedNodes()
 
             # Get the node type and remove it from the data, not needed
             node_type = instance_data.pop("node_type", None)
             if node_type is None:
                 node_type = "geometry"
 
-            # Get out node
-            out = hou.node("/out")
-            instance_node = out.createNode(
-                node_type, node_name=subset_name)
-            instance_node.moveToGoodPosition()
+            instance_node = self._create_instance_node(
+                subset_name, "/out", node_type, pre_create_data)
 
             # wondering if we'll ever need more than one member here
             # in Houdini

From fc5c07f1ca08021048acc99c24bad1e7656aa378 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 9 Sep 2022 18:40:25 +0200
Subject: [PATCH 033/409] :recycle: selected nodes argument rename

---
 .../hosts/houdini/plugins/create/create_alembic_camera.py  | 4 ++--
 openpype/hosts/houdini/plugins/create/create_pointcache.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py
index 294c99744b..483c4205a8 100644
--- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py
+++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py
@@ -29,8 +29,8 @@ class CreateAlembicCamera(plugin.HoudiniCreator):
             "use_sop_path": False,
         }
 
-        if self._nodes:
-            path = self._nodes[0].path()
+        if self.selected_nodes:
+            path = self.selected_nodes[0].path()
             # Split the node path into the first root and the remainder
             # So we can set the root and objects parameters correctly
             _, root, remainder = path.split("/", 2)
diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py
index 889e27ba51..239f3ce50b 100644
--- a/openpype/hosts/houdini/plugins/create/create_pointcache.py
+++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py
@@ -33,8 +33,8 @@ class CreatePointCache(plugin.HoudiniCreator):
             "filename": "$HIP/pyblish/{}.abc".format(subset_name)
         }
 
-        if self._nodes:
-            parms["sop_path"] = self._nodes[0].path()
+        if self.selected_nodes:
+            parms["sop_path"] = self.selected_nodes[0].path()
 
         instance_node.setParms(parms)
         instance_node.parm("trange").set(1)

From 93b3b0403401075596e9951c06fc5414e7fa50a0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Fri, 9 Sep 2022 18:40:47 +0200
Subject: [PATCH 034/409] :construction: hda creator refactor

---
 .../houdini/plugins/create/create_hda.py      | 73 ++++++++++---------
 1 file changed, 38 insertions(+), 35 deletions(-)

diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py
index b1751d0b6c..67e338b1b3 100644
--- a/openpype/hosts/houdini/plugins/create/create_hda.py
+++ b/openpype/hosts/houdini/plugins/create/create_hda.py
@@ -34,6 +34,43 @@ class CreateHDA(plugin.HoudiniCreator):
         }
         return subset_name.lower() in existing_subset_names_low
 
+    def _create_instance_node(
+            self, node_name, parent, node_type="geometry"):
+        parent_node = hou.node("/obj")
+        if self.selected_nodes:
+            # if we have `use selection` enabled, and we have some
+            # selected nodes ...
+ subnet = parent_node.collapseIntoSubnet( + self._nodes, + subnet_name="{}_subnet".format(node_name)) + subnet.moveToGoodPosition() + to_hda = subnet + else: + to_hda = parent_node.createNode( + "subnet", node_name="{}_subnet".format(node_name)) + if not to_hda.type().definition(): + # if node type has not its definition, it is not user + # created hda. We test if hda can be created from the node. + if not to_hda.canCreateDigitalAsset(): + raise plugin.OpenPypeCreatorError( + "cannot create hda from node {}".format(to_hda)) + + hda_node = to_hda.createDigitalAsset( + name=node_name, + hda_file_name="$HIP/{}.hda".format(node_name) + ) + hda_node.layoutChildren() + elif self._check_existing(node_name): + raise plugin.OpenPypeCreatorError( + ("subset {} is already published with different HDA" + "definition.").format(node_name)) + else: + hda_node = to_hda + + hda_node.setName(node_name) + return hda_node + + def create(self, subset_name, instance_data, pre_create_data): import hou @@ -44,38 +81,4 @@ class CreateHDA(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = hou.node(instance.get("instance_node")) - out = hou.node("/obj") - if self._nodes: - # if we have `use selection` enabled, and we have some - # selected nodes ... - subnet = out.collapseIntoSubnet( - self.nodes, - subnet_name="{}_subnet".format(subset_name)) - subnet.moveToGoodPosition() - to_hda = subnet - else: - to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(subset_name)) - if not to_hda.type().definition(): - # if node type has not its definition, it is not user - # created hda. We test if hda can be created from the node. - if not to_hda.canCreateDigitalAsset(): - raise plugin.OpenPypeCreatorError( - "cannot create hda from node {}".format(to_hda)) - - hda_node = to_hda.createDigitalAsset( - name=subset_name, - hda_file_name="$HIP/{}.hda".format(subset_name) - ) - hda_node.layoutChildren() - elif self._check_existing(subset_name): - raise plugin.OpenPypeCreatorError( - ("subset {} is already published with different HDA" - "definition.").format(subset_name)) - else: - hda_node = to_hda - - hda_node.setName(subset_name) - - return hda_node + return instance From 4624fb930ff580b1f33c34ec8d3426f7e6fafd4d Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:26:49 +0200 Subject: [PATCH 035/409] :recycle: minor fixes --- .../houdini/plugins/publish/validate_alembic_face_sets.py | 5 ++--- .../houdini/plugins/publish/validate_alembic_input_node.py | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index 7c1d068390..10681e4b72 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,7 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. 
@@ -18,7 +17,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index aa572dc3bb..4355bc7921 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline import PublishValidationError From 2c59d6317932cd6040b9c77f316112922b850a79 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:27:28 +0200 Subject: [PATCH 036/409] :recycle: change vdb cache creator to new publisher --- .../plugins/create/create_vbd_cache.py | 38 +++++++++---------- .../publish/validate_vdb_output_node.py | 10 +++-- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 242c21fc72..1a5011745f 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,38 +1,36 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating VDB Caches.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateVDBCache(plugin.Creator): +class CreateVDBCache(plugin.HoudiniCreator): """OpenVDB from Geometry ROP""" - + identifier = "io.openpype.creators.houdini.vdbcache" name = "vbdcache" label = "VDB Cache" family = "vdbcache" icon = "cloud" - def __init__(self, *args, **kwargs): - super(CreateVDBCache, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "geometry"}) - # Set node type to create for output - self.data["node_type"] = "geometry" + instance = super(CreateVDBCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name), "initsim": True, "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"soppath": node.path()}) + if self.selected_nodes: + parms["soppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 9be2635a9e..a9f8b38e7e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" @@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" " of type VDB!" + raise PublishValidationError( + "Node connected to the output node is not" " of type VDB!", + title=self.label ) @classmethod From dff7c27562dedda5ce3a1daece04840121b8001a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:28:25 +0200 Subject: [PATCH 037/409] :bug: fix function call --- openpype/hosts/houdini/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 8180676ce8..28830bdc64 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -128,7 +128,7 @@ class HoudiniCreator(NewCreator): node_type = "geometry" instance_node = self._create_instance_node( - subset_name, "/out", node_type, pre_create_data) + subset_name, "/out", node_type) # wondering if we'll ever need more than one member here # in Houdini From c5e7d8f93c620abbcc64a6fdcb7a6824558f57f7 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:33:20 +0200 Subject: [PATCH 038/409] :recycle: handle file saving --- openpype/hosts/houdini/api/pipeline.py | 7 +++++++ .../houdini/plugins/publish/increment_current_file.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b9246251a2..4ff24c8004 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -166,6 +166,13 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx = self._create_context_node() return lib.read(op_ctx) + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) + def on_file_event_callback(event): if event == hou.hipFileEventType.AfterLoad: diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c990f481d3..92ac9fbeca 100644 
--- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py
+++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py
@@ -27,4 +27,4 @@
         ), "Collected filename from current scene name."
 
         new_filepath = version_up(current_file)
-        host.save(new_filepath)
+        host.save_file(new_filepath)

From 99bf89cafae2e94ec927d948811e60e5b15cfb44 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 16 Sep 2022 00:34:02 +0200
Subject: [PATCH 039/409] :recycle: handle frame data

---
 openpype/hosts/houdini/api/lib.py                  | 27 +++++++++++++++++++
 openpype/hosts/houdini/api/plugin.py               |  2 +-
 .../houdini/plugins/publish/collect_frames.py      |  2 ++
 3 files changed, 30 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py
index f438944b09..d0a3068531 100644
--- a/openpype/hosts/houdini/api/lib.py
+++ b/openpype/hosts/houdini/api/lib.py
@@ -530,3 +530,30 @@ def get_template_from_value(key, value):
         raise TypeError("Unsupported type: %r" % type(value))
 
     return parm
+
+
+def get_frame_data(node):
+    """Get the frame data: start frame, end frame and steps.
+
+    Args:
+        node(hou.Node)
+
+    Returns:
+        dict: frame data for start, end and steps.
+
+    """
+    data = {}
+
+    if node.parm("trange") is None:
+
+        return data
+
+    if node.evalParm("trange") == 0:
+        # trange 0 renders only the current frame, so there is no range
+        return data
+
+    data["frameStart"] = node.evalParm("f1")
+    data["frameEnd"] = node.evalParm("f2")
+    data["steps"] = node.evalParm("f3")
+
+    return data
\ No newline at end of file
diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py
index 28830bdc64..ee73745651 100644
--- a/openpype/hosts/houdini/api/plugin.py
+++ b/openpype/hosts/houdini/api/plugin.py
@@ -14,7 +14,7 @@ from openpype.pipeline import (
 )
 from openpype.lib import BoolDef
 from openpype.hosts.houdini.api import list_instances, remove_instance
-from .lib import imprint, read
+from .lib import imprint, read, get_frame_data
 
 
 class OpenPypeCreatorError(CreatorError):
diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py
index cad894cc3f..cd94635c29 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_frames.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py
@@ -25,6 +25,8 @@ class CollectFrames(pyblish.api.InstancePlugin):
 
     def process(self, instance):
         ropnode = instance.data["members"][0]
+        frame_data = lib.get_frame_data(ropnode)
+        instance.data.update(frame_data)
 
         start_frame = instance.data.get("frameStart", None)
         end_frame = instance.data.get("frameEnd", None)

From bd8b2c7d70a13a85f89ab4f60489a8114e9cdf01 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Fri, 16 Sep 2022 00:34:26 +0200
Subject: [PATCH 040/409] :recycle: arnold creator

---
 .../plugins/create/create_arnold_ass.py       | 45 ++++++++++---------
 1 file changed, 23 insertions(+), 22 deletions(-)

diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
index 72088e43b0..b3926b8cee 100644
--- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
+++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
@@ -1,9 +1,12 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating Arnold ASS files."""
 from openpype.hosts.houdini.api import plugin
 
 
-class CreateArnoldAss(plugin.Creator):
+class CreateArnoldAss(plugin.HoudiniCreator):
    """Arnold .ass Archive"""
 
+    identifier = 
"io.openpype.creators.houdini.ass" label = "Arnold ASS" family = "ass" icon = "magic" @@ -12,42 +15,40 @@ class CreateArnoldAss(plugin.Creator): # Default extension: `.ass` or `.ass.gz` ext = ".ass" - def __init__(self, *args, **kwargs): - super(CreateArnoldAss, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "arnold"}) - self.data.update({"node_type": "arnold"}) + instance = super(CreateArnoldAss, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def process(self): - node = super(CreateArnoldAss, self).process() + instance_node = hou.node(instance.get("instance_node")) - basename = node.name() - node.setName(basename + "_ASS", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ASS", unique_name=True) # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export - parm_template_group = node.parmTemplateGroup() + parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) - node.setParmTemplateGroup(parm_template_group) + instance_node.setParmTemplateGroup(parm_template_group) - filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext) + filepath = "$HIP/pyblish/{}.$F4{}".format(subset_name, self.ext) parms = { # Render frame range "trange": 1, - # Arnold ROP settings "ar_ass_file": filepath, - "ar_ass_export_enable": 1 + "ar_ass_export_enable": 1, + "filename": filepath } - node.setParms(parms) - # Lock the ASS export attribute - node.parm("ar_ass_export_enable").lock(True) - - # Lock some Avalon attributes - to_lock = ["family", "id"] + # Lock any parameters in this list + to_lock = ["ar_ass_export_enable", "family", "id"] for name in to_lock: - parm = node.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 93b3b0403401075596e9951c06fc5414e7fa50a0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:42 +0200 Subject: [PATCH 041/409] :recycle: composite creator --- .../plugins/create/create_composite.py | 51 +++++++++---------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index e278708076..96d8ca9fd5 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,44 +1,43 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating composite sequences.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateCompositeSequence(plugin.Creator): +class CreateCompositeSequence(plugin.HoudiniCreator): """Composite ROP to Image Sequence""" + identifier = "io.openpype.creators.houdini.imagesequence" label = "Composite (Image Sequence)" family = "imagesequence" icon = "gears" - def __init__(self, *args, **kwargs): - super(CreateCompositeSequence, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou + from pprint import pformat - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "comp"}) - # Type of ROP node to create - 
self.data.update({"node_type": "comp"}) + instance = super(CreateCompositeSequence, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + self.log.info(pformat(instance)) + print(pformat(instance)) + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. + filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) + parms = { + "copoutput": filepath + } - """ - parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} - - if self.nodes: - node = self.nodes[0] - parms.update({"coppath": node.path()}) - - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - try: - parm = instance.parm(name) - parm.lock(True) - except AttributeError: - # missing lock pattern - self.log.debug( - "missing lock pattern {}".format(name)) + parm = instance_node.parm(name) + parm.lock(True) + From ec4bcc474b7a3c3701ae45c8008536d0fc3d7992 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 12:25:48 +0200 Subject: [PATCH 042/409] :recycle: replace exceptions and asserts in validators --- .../plugins/publish/validate_bypass.py | 12 +++++---- .../publish/validate_cop_output_node.py | 19 ++++++++----- .../publish/validate_file_extension.py | 11 +++++--- .../validate_houdini_license_category.py | 10 ++++--- .../publish/validate_mkpaths_toggled.py | 13 ++++----- .../plugins/publish/validate_no_errors.py | 9 ++++--- .../publish/validate_remote_publish.py | 27 ++++++++++++------- .../validate_remote_publish_enabled.py | 11 +++++--- .../publish/validate_sop_output_node.py | 9 ++++--- .../validate_usd_layer_path_backslashes.py | 8 +++--- .../publish/validate_usd_model_and_shade.py | 6 +++-- .../publish/validate_usd_output_node.py | 9 ++++--- .../validate_usd_render_product_names.py | 7 +++-- .../plugins/publish/validate_usd_setdress.py | 7 +++-- .../validate_usd_shade_model_exists.py | 9 ++++--- .../publish/validate_usd_shade_workspace.py | 23 +++++++++------- 16 files changed, 121 insertions(+), 69 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 1b441b8da9..59ab2d2b1b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,7 +12,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder - 0.1 + order = pyblish.api.ValidatorOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" @@ -26,9 +27,10 @@ class ValidateBypassed(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: rop = invalid[0] - raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." 
- % rop.path() + raise PublishValidationError( + ("ROP node {} is set to bypass, publishing cannot " + "continue.".format(rop.path())), + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 86ddc2adf2..2e99e5fb41 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError + class ValidateCopOutputNode(pyblish.api.InstancePlugin): """Validate the instance COP Output Node. @@ -20,9 +23,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -54,7 +58,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Cop2", ( - "Output node %s is not of category Cop2. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Cop2": + raise PublishValidationError( + ("Output node %s is not of category Cop2. " + "This is a bug...").format(output_node.path()), + title=cls.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index f050a41b88..5211cdb919 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import os import pyblish.api from openpype.hosts.houdini.api import lib +from openpype.pipeline import PublishValidationError class ValidateFileExtension(pyblish.api.InstancePlugin): @@ -29,8 +31,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "ROP node has incorrect " "file extension: %s" % invalid + raise PublishValidationError( + "ROP node has incorrect file extension: {}".format(invalid), + title=self.label ) @classmethod @@ -53,7 +56,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): for family in families: extension = cls.family_extensions.get(family, None) if extension is None: - raise RuntimeError("Unsupported family: %s" % family) + raise PublishValidationError( + "Unsupported family: {}".format(family), + title=cls.label) if output_extension != extension: return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index f5f03aa844..f1c52f22c1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): @@ -24,7 +26,7 @@ class 
ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): license = hou.licenseCategory() if license != hou.licenseCategoryType.Commercial: - raise RuntimeError( - "USD Publishing requires a full Commercial " - "license. You are on: %s" % license - ) + raise PublishValidationError( + ("USD Publishing requires a full Commercial " + "license. You are on: {}").format(license), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index be6a798a95..9d1f92a101 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" @@ -14,10 +15,10 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid - ) + raise PublishValidationError( + ("Found ROP node with Create Intermediate " + "Directories turned off: {}".format(invalid)), + title=self.label) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 77e7cc9ff7..fd396ad8c9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError def cook_in_range(node, start, end): @@ -28,7 +29,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] label = "Validate no errors" @@ -62,4 +63,6 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): errors = get_errors(node) if errors: self.log.error(errors) - raise RuntimeError("Node has errors: %s" % node.path()) + raise PublishValidationError( + "Node has errors: {}".format(node.path()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 0ab182c584..7349022681 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,9 @@ +# -*-coding: utf-8 -*- import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError import hou @@ -27,17 +29,24 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): # We ensure it's a shell node and that it has the pre-render script # set correctly. 
Plus the shell script it will trigger should be # completely empty (doing nothing) - assert node.type().name() == "shell", "Must be shell ROP node" - assert node.parm("command").eval() == "", "Must have no command" - assert not node.parm("shellexec").eval(), "Must not execute in shell" - assert ( - node.parm("prerender").eval() == cmd - ), "REMOTE_PUBLISH node does not have correct prerender script." - assert ( - node.parm("lprerender").eval() == "python" - ), "REMOTE_PUBLISH node prerender script type not set to 'python'" + if node.type().name() != "shell": + self.raise_error("Must be shell ROP node") + if node.parm("command").eval() != "": + self.raise_error("Must have no command") + if node.parm("shellexec").eval(): + self.raise_error("Must not execute in shell") + if node.parm("prerender").eval() != cmd: + self.raise_error(("REMOTE_PUBLISH node does not have " + "correct prerender script.")) + if node.parm("lprerender").eval() != "python": + self.raise_error(("REMOTE_PUBLISH node prerender script " + "type not set to 'python'")) @classmethod def repair(cls, context): """(Re)create the node if it fails to pass validation.""" lib.create_remote_publish_node(force=True) + + def raise_error(self, message): + self.log.error(message) + raise PublishValidationError(message, title=self.label) \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index afc8df7528..8ec62f4e85 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -18,10 +20,12 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=self.label) if node.isBypassed(): - raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + raise PublishValidationError( + "REMOTE_PUBLISH must not be bypassed.", title=self.label) @classmethod def repair(cls, context): @@ -29,7 +33,8 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=cls.label) cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index c18ad7a1b7..a1a96120e2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -58,10 +58,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." 
- % output_node.path() - ) + if output_node.type().category().name() != "Sop": + raise PublishValidationError( + ("Output node {} is not of category Sop. " + "This is a bug.").format(output_node.path()), + title=cls.label) # Ensure the node is cooked and succeeds to cook so we can correctly # check for its geometry data. diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 95cad82085..3e593a9508 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): @@ -44,7 +46,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): invalid.append(layer) if invalid: - raise RuntimeError( + raise PublishValidationError(( "Loaded layers have backslashes. " - "This is invalid for HUSK USD rendering." - ) + "This is invalid for HUSK USD rendering."), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index bdb7c05319..3ca0fd0298 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -1,7 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib - +from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux @@ -55,7 +56,8 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): if invalid: prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) - raise RuntimeError("Found invalid primitives: %s" % prim_paths) + raise PublishValidationError( + "Found invalid primitives: {}".format(prim_paths)) class ValidateUsdShade(ValidateUsdModel): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 0c38ccd4be..9a4d292778 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateUSDOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +22,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. 
" + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py index 36336a03ae..02c44ab94e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- +import os import pyblish.api -import os +from openpype.pipeline import PublishValidationError class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): @@ -28,4 +30,5 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): if invalid: for message in invalid: self.log.error(message) - raise RuntimeError("USD Render Paths are invalid.") + raise PublishValidationError( + "USD Render Paths are invalid.", title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 835cd5977a..89ae8b8ad9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUsdSetDress(pyblish.api.InstancePlugin): @@ -47,8 +49,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): invalid.append(node) if invalid: - raise RuntimeError( + raise PublishValidationError(( "SetDress contains local geometry. " "This is not allowed, it must be an assembly " - "of referenced assets." + "of referenced assets."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index f08c7c72c5..c4f118ac3b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import re import pyblish.api @@ -5,6 +6,7 @@ import pyblish.api from openpype.client import get_subset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -32,7 +34,8 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): project_name, model_subset, asset_doc["_id"], fields=["_id"] ) if not subset_doc: - raise RuntimeError( - "USD Model subset not found: " - "%s (%s)" % (model_subset, asset_name) + raise PublishValidationError( + ("USD Model subset not found: " + "{} ({})").format(model_subset, asset_name), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 2781756272..2ff2702061 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError import hou @@ -12,7 +13,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): """ - order = 
ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade Workspace" @@ -39,13 +40,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): if node_type != other_node_type: continue - # Get highest version + # Get the highest version highest = max(highest, other_version) if version != highest: - raise RuntimeError( - "Shading Workspace is not the latest version." - " Found %s. Latest is %s." % (version, highest) + raise PublishValidationError( + ("Shading Workspace is not the latest version." + " Found {}. Latest is {}.").format(version, highest), + title=self.label ) # There were some issues with the editable node not having the right @@ -56,8 +58,9 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): ) rop_value = rop.parm("lopoutput").rawValue() if rop_value != value: - raise RuntimeError( - "Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values." + raise PublishValidationError( + ("Shading Workspace has invalid 'lopoutput'" + " parameter value. The Shading Workspace" + " needs to be reset to its default values."), + title=self.label ) From 08ac24080f863e904b4ddec4b53a9c9f502f9685 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 15:02:04 +0200 Subject: [PATCH 043/409] :recycle: convert creators --- .../plugins/create/create_redshift_proxy.py | 40 +++++++------- .../plugins/create/create_redshift_rop.py | 54 +++++++++---------- .../houdini/plugins/create/create_usd.py | 38 ++++++------- .../plugins/create/create_usdrender.py | 37 ++++++------- 4 files changed, 85 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index da4d80bf2b..d4bfe9d253 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -1,18 +1,20 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Redshift proxies.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftProxy(plugin.Creator): +class CreateRedshiftProxy(plugin.HoudiniCreator): """Redshift Proxy""" - + identifier = "io.openpype.creators.houdini.redshiftproxy" label = "Redshift Proxy" family = "redshiftproxy" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateRedshiftProxy, self).__init__(*args, **kwargs) - + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) # Redshift provides a `Redshift_Proxy_Output` node type which shows # a limited set of parameters by default and is set to extract a @@ -21,28 +23,26 @@ class CreateRedshiftProxy(plugin.Creator): # why this happens. # TODO: Somehow enforce so that it only shows the original limited # attributes of the Redshift_Proxy_Output node type - self.data.update({"node_type": "Redshift_Proxy_Output"}) + instance_data.update({"node_type": "Redshift_Proxy_Output"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateRedshiftProxy, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
+        instance_node = hou.node(instance.get("instance_node"))
 
-        """
         parms = {
-            "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs',
+            "RS_archive_file": "$HIP/pyblish/{}.$F4.rs".format(subset_name),
         }
 
-        if self.nodes:
-            node = self.nodes[0]
-            path = node.path()
-            parms["RS_archive_sopPath"] = path
+        if self.selected_nodes:
+            parms["RS_archive_sopPath"] = self.selected_nodes[0].path()
 
-        instance.setParms(parms)
+        instance_node.setParms(parms)
 
         # Lock some Avalon attributes
-        to_lock = ["family", "id"]
+        to_lock = ["family", "id", "prim_to_detail_pattern"]
         for name in to_lock:
-            parm = instance.parm(name)
+            parm = instance_node.parm(name)
             parm.lock(True)
diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
index 6949ca169b..2bb8325623 100644
--- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
@@ -1,41 +1,40 @@
-import hou
+# -*- coding: utf-8 -*-
+"""Creator plugin to create Redshift ROP."""
 from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance
 
 
-class CreateRedshiftROP(plugin.Creator):
+class CreateRedshiftROP(plugin.HoudiniCreator):
     """Redshift ROP"""
-
+    identifier = "io.openpype.creators.houdini.redshift_rop"
     label = "Redshift ROP"
     family = "redshift_rop"
     icon = "magic"
     defaults = ["master"]
 
-    def __init__(self, *args, **kwargs):
-        super(CreateRedshiftROP, self).__init__(*args, **kwargs)
+    def create(self, subset_name, instance_data, pre_create_data):
+        import hou  # noqa
+
+        instance_data.pop("active", None)
+        instance_data.update({"node_type": "Redshift_ROP"})
+        # Add chunk size attribute
+        instance_data["chunkSize"] = 10
 
         # Clear the family prefix from the subset
-        subset = self.data["subset"]
+        subset = subset_name
         subset_no_prefix = subset[len(self.family):]
         subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:]
-        self.data["subset"] = subset_no_prefix
+        subset_name = subset_no_prefix
 
-        # Add chunk size attribute
-        self.data["chunkSize"] = 10
+        instance = super(CreateRedshiftROP, self).create(
+            subset_name,
+            instance_data,
+            pre_create_data)  # type: CreatedInstance
 
-        # Remove the active, we are checking the bypass flag of the nodes
-        self.data.pop("active", None)
+        instance_node = hou.node(instance.get("instance_node"))
 
-        self.data.update({"node_type": "Redshift_ROP"})
-
-    def _process(self, instance):
-        """Creator main entry point.
-
-        Args:
-            instance (hou.Node): Created Houdini instance.
-
-        """
-        basename = instance.name()
-        instance.setName(basename + "_ROP", unique_name=True)
+        basename = instance_node.name()
+        instance_node.setName(basename + "_ROP", unique_name=True)
 
         # Also create the linked Redshift IPR Rop
         try:
@@ -43,11 +42,12 @@ class CreateRedshiftROP(plugin.Creator):
             "Redshift_IPR", node_name=basename + "_IPR"
         )
         except hou.OperationFailed:
-            raise Exception(("Cannot create Redshift node. Is Redshift "
-                            "installed and enabled?"))
+            raise plugin.OpenPypeCreatorError(
+                ("Cannot create Redshift node. 
Is Redshift " + "installed and enabled?")) # Move it to directly under the Redshift ROP - ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1)) # Set the linked rop to the Redshift ROP ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) @@ -61,10 +61,10 @@ class CreateRedshiftROP(plugin.Creator): "RS_outputMultilayerMode": 0, # no multi-layered exr "RS_outputBeautyAOVSuffix": "beauty", } - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 5bcb7840c0..8502a4e5e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -1,39 +1,39 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating USDs.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSD(plugin.Creator): +class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" - + identifier = "io.openpype.creators.houdini.usd" label = "USD (experimental)" family = "usd" icon = "gears" enabled = False - def __init__(self, *args, **kwargs): - super(CreateUSD, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usd"}) - self.data.update({"node_type": "usd"}) + instance = super(CreateUSD, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- - """ parms = { - "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name), "enableoutputprocessor_simplerelativepaths": False, } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = [ @@ -43,5 +43,5 @@ class CreateUSD(plugin.Creator): "id", ] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index cb3fe3f02b..e5c61d2984 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -1,42 +1,43 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin for creating USD renders.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSDRender(plugin.Creator): +class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" - + identifier = "io.openpype.creators.houdini.usdrender" label = "USD Render (experimental)" family = "usdrender" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateUSDRender, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - self.parent = hou.node("/stage") + instance_data["parent"] = hou.node("/stage") # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usdrender"}) - self.data.update({"node_type": "usdrender"}) + instance = super(CreateUSDRender, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- """ parms = { # Render frame range "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) - instance.setParms(parms) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 71caefe44915f9618e276812408d29ebd4ca5a51 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:06:28 +0200 Subject: [PATCH 044/409] :recycle: refactor parameter locking --- openpype/hosts/houdini/api/plugin.py | 15 +++++++++++++++ .../houdini/plugins/create/create_arnold_ass.py | 4 +--- .../houdini/plugins/create/create_composite.py | 11 ++--------- .../houdini/plugins/create/create_pointcache.py | 4 +--- .../plugins/create/create_redshift_proxy.py | 4 +--- .../houdini/plugins/create/create_redshift_rop.py | 4 +--- .../hosts/houdini/plugins/create/create_usd.py | 4 +--- .../houdini/plugins/create/create_usdrender.py | 4 +--- 8 files changed, 23 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ee73745651..5c52cb416b 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -150,6 +150,21 @@ class HoudiniCreator(NewCreator): OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + def lock_parameters(self, node, parameters): + """Lock list of specified parameters on the node. + + Args: + node (hou.Node): Houdini node to lock parameters on. + parameters (list of str): List of parameter names. + + """ + for name in parameters: + try: + parm = node.parm(name) + parm.lock(True) + except AttributeError: + self.log.debug("missing lock pattern {}".format(name)) + def collect_instances(self): for instance in list_instances(creator_id=self.identifier): created_instance = CreatedInstance.from_existing( diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index b3926b8cee..a48658ab99 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -49,6 +49,4 @@ class CreateArnoldAss(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["ar_ass_export_enable", "family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 96d8ca9fd5..1a9c56571a 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -13,8 +13,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): icon = "gears" def create(self, subset_name, instance_data, pre_create_data): - import hou - from pprint import pformat + import hou # noqa instance_data.pop("active", None) instance_data.update({"node_type": "comp"}) @@ -24,10 +23,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - self.log.info(pformat(instance)) - print(pformat(instance)) instance_node = hou.node(instance.get("instance_node")) - filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) parms = { "copoutput": filepath @@ -37,7 +33,4 @@ class 
CreateCompositeSequence(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) - + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 239f3ce50b..124936d285 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -41,6 +41,4 @@ class CreatePointCache(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index d4bfe9d253..8b6a68437b 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -43,6 +43,4 @@ class CreateRedshiftProxy(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id", "prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 2bb8325623..2cbe9bfda1 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -65,6 +65,4 @@ class CreateRedshiftROP(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 8502a4e5e9..51ed8237c5 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -42,6 +42,4 @@ class CreateUSD(plugin.HoudiniCreator): "family", "id", ] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index e5c61d2984..f78f0bed50 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -38,6 +38,4 @@ class CreateUSDRender(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) From df2f68db9798bddffb8ee8fcfcf08764dffc44e9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:06:56 +0200 Subject: [PATCH 045/409] :recycle: move splitext to lib --- openpype/hosts/houdini/api/lib.py | 23 ++++++++++++++++++- .../houdini/plugins/publish/collect_frames.py | 21 +++++++---------- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index d0a3068531..8d6f666eb7 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 
-*- import sys +import os import uuid import logging from contextlib import contextmanager @@ -556,4 +557,24 @@ def get_frame_data(node): data["frameEnd"] = node.evalParm("f2") data["steps"] = node.evalParm("f3") - return data \ No newline at end of file + return data + + +def splitext(name, allowed_multidot_extensions): + # type: (str, list) -> tuple + """Split file name to name and extension. + + Args: + name (str): File name to split. + allowed_multidot_extensions (list of str): List of allowed multidot + extensions. + + Returns: + tuple: Name and extension. + """ + + for ext in allowed_multidot_extensions: + if name.endswith(ext): + return name[:-len(ext)], ext + + return os.path.splitext(name) diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index cd94635c29..9108432384 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -1,19 +1,13 @@ +# -*- coding: utf-8 -*- +"""Collector plugin for frames data on ROP instances.""" import os import re -import hou +import hou # noqa import pyblish.api from openpype.hosts.houdini.api import lib -def splitext(name, allowed_multidot_extensions): - - for ext in allowed_multidot_extensions: - if name.endswith(ext): - return name[:-len(ext)], ext - - return os.path.splitext(name) - class CollectFrames(pyblish.api.InstancePlugin): """Collect all frames which would be saved from the ROP nodes""" @@ -40,13 +34,13 @@ class CollectFrames(pyblish.api.InstancePlugin): self.log.warning("Using current frame: {}".format(hou.frame())) output = output_parm.eval() - _, ext = splitext(output, + _, ext = lib.splitext(output, allowed_multidot_extensions=[".ass.gz"]) file_name = os.path.basename(output) result = file_name # Get the filename pattern match from the output - # path so we can compute all frames that would + # path, so we can compute all frames that would # come out from rendering the ROP node if there # is a frame pattern in the name pattern = r"\w+\.(\d+)" + re.escape(ext) @@ -65,8 +59,9 @@ class CollectFrames(pyblish.api.InstancePlugin): # for a custom frame list. So this should be refactored. 
instance.data.update({"frames": result}) - def create_file_list(self, match, start_frame, end_frame): - """Collect files based on frame range and regex.match + @staticmethod + def create_file_list(match, start_frame, end_frame): + """Collect files based on frame range and `regex.match` Args: match(re.match): match object From d59861a6539dd69e51180245ab6ce2164343aaab Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:07:21 +0200 Subject: [PATCH 046/409] :bug: update representation creation --- .../plugins/publish/extract_composite.py | 26 +++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index eb77a91d62..4c91d51efd 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -3,7 +3,7 @@ import os import pyblish.api import openpype.api -from openpype.hosts.houdini.api.lib import render_rop +from openpype.hosts.houdini.api.lib import render_rop, splitext class ExtractComposite(openpype.api.Extractor): @@ -28,8 +28,24 @@ class ExtractComposite(openpype.api.Extractor): render_rop(ropnode) - if "files" not in instance.data: - instance.data["files"] = [] + output = instance.data["frames"] + _, ext = splitext(output[0], []) + ext = ext.lstrip(".") - frames = instance.data["frames"] - instance.data["files"].append(frames) + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": ext, + "files": output, + "stagingDir": staging_dir, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + } + + from pprint import pformat + + self.log.info(pformat(representation)) + + instance.data["representations"].append(representation) \ No newline at end of file From 3a935c968c97bd19695ae3888c9904a961397d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 21 Sep 2022 18:36:23 +0200 Subject: [PATCH 047/409] :rotating_light: cosmetic changes --- openpype/hosts/houdini/api/lib.py | 3 +++ openpype/hosts/houdini/api/pipeline.py | 7 ++++--- openpype/hosts/houdini/api/plugin.py | 5 +++-- .../houdini/plugins/create/create_alembic_camera.py | 6 ++++-- .../hosts/houdini/plugins/create/create_arnold_ass.py | 4 +++- openpype/hosts/houdini/plugins/create/create_hda.py | 9 ++++----- .../hosts/houdini/plugins/publish/extract_composite.py | 2 +- .../houdini/plugins/publish/increment_current_file.py | 6 +++--- .../hosts/houdini/plugins/publish/validate_camera_rop.py | 2 +- .../houdini/plugins/publish/validate_remote_publish.py | 2 +- 10 files changed, 27 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 8d6f666eb7..3426040d65 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -20,6 +20,7 @@ self._parent = None log = logging.getLogger(__name__) JSON_PREFIX = "JSON:::" + def get_asset_fps(): """Return current asset fps.""" return get_current_project_asset()["data"].get("fps") @@ -418,6 +419,8 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects data = {} + if not node: + return data for parameter in node.spareParms(): value = parameter.eval() # test if value is json encoded dict diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 4ff24c8004..d64479fc14 100644 --- 
a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -91,10 +91,11 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): def save_workfile(self, dst_path=None): # Force forwards slashes to avoid segfault - filepath = dst_path.replace("\\", "/") - hou.hipFile.save(file_name=filepath, + if dst_path: + dst_path = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=dst_path, save_to_recent_files=True) - return filepath + return dst_path def open_workfile(self, filepath): # Force forwards slashes to avoid segfault diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 5c52cb416b..897696533f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read, get_frame_data +from .lib import imprint, read class OpenPypeCreatorError(CreatorError): @@ -96,8 +96,9 @@ class Creator(LegacyCreator): class HoudiniCreator(NewCreator): selected_nodes = [] + @staticmethod def _create_instance_node( - self, node_name, parent, + node_name, parent, node_type="geometry"): # type: (str, str, str) -> hou.Node """Create node representing instance. diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 483c4205a8..183ab28b26 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -5,7 +5,7 @@ from openpype.pipeline import CreatedInstance class CreateAlembicCamera(plugin.HoudiniCreator): - """Single baked camera from Alembic ROP""" + """Single baked camera from Alembic ROP.""" identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" @@ -40,5 +40,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator): # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
- instance_node.parm("use_sop_path").lock(True) + to_lock = ["use_sop_path"] + self.lock_parameters(instance_node, to_lock) + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index a48658ab99..40b253d1aa 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -24,7 +24,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance = super(CreateArnoldAss, self).create( subset_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) # type: plugin.CreatedInstance instance_node = hou.node(instance.get("instance_node")) @@ -47,6 +47,8 @@ class CreateArnoldAss(plugin.HoudiniCreator): "filename": filepath } + instance_node.setParms(parms) + # Lock any parameters in this list to_lock = ["ar_ass_export_enable", "family", "id"] self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 67e338b1b3..67c05b1634 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -5,7 +5,7 @@ from openpype.client import ( get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import (lib, plugin) +from openpype.hosts.houdini.api import plugin class CreateHDA(plugin.HoudiniCreator): @@ -36,6 +36,8 @@ class CreateHDA(plugin.HoudiniCreator): def _create_instance_node( self, node_name, parent, node_type="geometry"): + import hou + parent_node = hou.node("/obj") if self.selected_nodes: # if we have `use selection` enabled, and we have some @@ -70,15 +72,12 @@ class CreateHDA(plugin.HoudiniCreator): hda_node.setName(node_name) return hda_node - def create(self, subset_name, instance_data, pre_create_data): - import hou - instance_data.pop("active", None) instance = super(CreateHDA, self).create( subset_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) # type: plugin.CreatedInstance return instance diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 4c91d51efd..8dbfd3e08c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -48,4 +48,4 @@ class ExtractComposite(openpype.api.Extractor): self.log.info(pformat(representation)) - instance.data["representations"].append(representation) \ No newline at end of file + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index 92ac9fbeca..16d9ef9aec 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,7 +2,7 @@ import pyblish.api from openpype.lib import version_up from openpype.pipeline import registered_host - +from openpype.hosts.houdini.api import HoudiniHost class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. 
@@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): # Filename must not have changed since collecting - host = registered_host() + host = registered_host() # type: HoudiniHost current_file = host.current_file() assert ( context.data["currentFile"] == current_file ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save_file(new_filepath) + host.save_workfile(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index 18fed7fbc4..41b5273e6a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -56,5 +56,5 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): if camera.type().name() != "cam": raise PublishValidationError( ("Object set in Alembic ROP is not a camera: " - "{} (type: {})").format(camera, camera.type().name()), + "{} (type: {})").format(camera, camera.type().name()), title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 7349022681..4e8e5fc0e8 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -49,4 +49,4 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): def raise_error(self, message): self.log.error(message) - raise PublishValidationError(message, title=self.label) \ No newline at end of file + raise PublishValidationError(message, title=self.label) From a2f1b8087c4f66909b1304064dcde50b140f43eb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 4 Oct 2022 16:57:00 +0200 Subject: [PATCH 048/409] :construction: ground work for extractor --- openpype/hosts/maya/api/lib.py | 25 ++++ openpype/hosts/maya/api/mtoa.py | 179 ++++++++++++++++++++++++++++ openpype/hosts/maya/api/viewport.py | 19 +++ 3 files changed, 223 insertions(+) create mode 100644 openpype/hosts/maya/api/mtoa.py create mode 100644 openpype/hosts/maya/api/viewport.py diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 6a8447d6ad..410bbb3416 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3498,3 +3498,28 @@ def iter_visible_nodes_in_range(nodes, start, end): # If no more nodes to process break the frame iterations.. if not node_dependencies: break + + +@contextlib.contextmanager +def selection(*nodes): + """Execute something with a specific Maya selection. + + Example: + .. 
code-block:: python + + cmds.select('side') + print(cmds.ls(sl=True)) + # ['side'] + + with selection('top', 'lambert1'): + print(cmds.ls) + # ['top', 'lambert1'] + + print(cmds.ls(sl=True)) + # ['side'] + + """ + current = cmds.ls(sl=True) + cmds.select(*nodes, noExpand=True) + yield + cmds.select(current, noExpand=True) \ No newline at end of file diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py new file mode 100644 index 0000000000..6b9b1d6d44 --- /dev/null +++ b/openpype/hosts/maya/api/mtoa.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +"""Library of classes and functions deadling with MtoA functionality.""" +import tempfile +import contextlib + +import clique +import pyblish.api + +from maya import cmds + +from openpype.pipeline import publish +from .viewport import vp2_paused_context +from .lib import selection + + +class _AssExtractor(publish.Extractor): + """Base class for ASS type extractors.""" + + order = pyblish.api.ExtractorOrder + 0.01 + hosts = ["maya"] + + def get_ass_export_mask(self, maya_set): + import arnold # noqa + mask = arnold.AI_NODE_ALL + + ai_masks = {"options": {"value": arnold.AI_NODE_OPTIONS, + "default": False}, + "camera": {"value": arnold.AI_NODE_CAMERA, + "default": False}, + "light": {"value": arnold.AI_NODE_LIGHT, + "default": False}, + "shape": {"value": arnold.AI_NODE_SHAPE, + "default": True}, + "shader": {"value": arnold.AI_NODE_SHADER, + "default": True}, + "override": {"value": arnold.AI_NODE_OVERRIDE, + "default": False}, + "driver": {"value": arnold.AI_NODE_DRIVER, + "default": False}, + "filter": {"value": arnold.AI_NODE_FILTER, + "default": False}, + "color_manager": {"value": arnold.AI_NODE_COLOR_MANAGER, + "default": True}, + "operator": {"value": arnold.AI_NODE_OPERATOR, + "default": True}} + + for mask_name, mask_data in ai_masks.items(): + attr = "inf_ass_export_{}".format(mask_name) + + submask = self.get_set_attr("{}.{}".format(maya_set, attr), + default=mask_data["default"]) + + if not submask: + mask = mask ^ mask_data["value"] + + return mask + + def process(self, instance): + + dry_run = instance.data.get("ass.rr") + + staging_dir = self.staging_dir(instance) + sequence = instance.data.get("exportSequence", False) + + if not cmds.pluginInfo("mtoa", query=True, loaded=True): + cmds.loadPlugin("mtoa") + + # Export to a temporal path + export_dir = instance.context.data["stagingDir"] + export_path = tempfile.NamedTemporaryFile(suffix=".ass", + dir=export_dir, + delete=False) + + set_ = instance.data["set"] + kwargs = {"shadowLinks": 1, + "lightLinks": 1, + "boundingBox": True, + "selected": True, + "f": export_path.name} + + # Animation + + if sequence: + mask = self.get_ass_export_mask(set_) + start = instance.data.get("frameStartHandle", 1) + end = instance.data.get("frameEndHandle", 1) + step = instance.data.get("step", 1.0) + if start is not None: + kwargs["startFrame"] = float(start) + kwargs["endFrame"] = float(end) + kwargs["frameStep"] = float(step) + else: + mask = 44 + + # Generic options + if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), + False): + kwargs["expandProcedurals"] = True + + if self.get_set_attr("{}.inf_ass_fullpath".format(set_), + True): + kwargs["fullPath"] = True + + kwargs["mask"] = mask + + # Motion blur + mb = self.get_set_attr("{}.inf_ass_motion_blur".format(set_), False) + keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) + length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) + + targets = self.get_targets(instance) + + 
_sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) + _sorted_kwargs = ["{}={!r}".format(x, y) for x, y in _sorted_kwargs] + + if not dry_run: + self.log.debug("Running command: cmds.arnoldExportAss({})" + .format(", ".join(_sorted_kwargs))) + with vp2_paused_context(): + with selection(targets): + with self.motion_blur_ctx(mb, keys, length): + result = cmds.arnoldExportAss(**kwargs) + else: + instance.data["assExportKwargs"] = kwargs + start = kwargs.get("startFrame") + end = kwargs.get("endFrame") + result = [] + + range_ = [0] + if start is not None: + range_ = range(int(start), int(end) + 1) + + for i in range_: + fp = "{}.{:03d}.ass".format(export_path.name, i) + with open(fp, "w"): + pass + result.append(fp) + + if len(result) == 1: + filepath = result[0] + else: + collection = clique.assemble(result)[0][0] + filepath = collection.format() + + # Register the file + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'ass', + 'ext': 'ass', + 'files': filepath, + "stagingDir": staging_dir + } + + instance.data["representations"].append(representation) + + @contextlib.contextmanager + def motion_blur_ctx(self, force, keys, length): + if not force: + yield + return + + cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") + ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") + clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") + + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) + if keys > 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) + if length >= 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) + + try: + yield + finally: + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) diff --git a/openpype/hosts/maya/api/viewport.py b/openpype/hosts/maya/api/viewport.py new file mode 100644 index 0000000000..cbf78ab815 --- /dev/null +++ b/openpype/hosts/maya/api/viewport.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +"""Tools for working with viewport in Maya.""" +import contextlib +from maya import cmds # noqa + + +@contextlib.contextmanager +def vp2_paused_context(): + """Context manager to stop updating of vp2 viewport.""" + state = cmds.ogs(pause=True, query=True) + + if not state: + cmds.ogs(pause=True) + + try: + yield + finally: + if cmds.ogs(pause=True, query=True) != state: + cmds.ogs(pause=True) From 21e98faef021b83fbd961a63d6398795b9db119d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 21 Oct 2022 11:07:04 +0200 Subject: [PATCH 049/409] :sparkles: cache collected instances --- openpype/hosts/houdini/api/pipeline.py | 15 +++++++-------- openpype/hosts/houdini/api/plugin.py | 9 +++++++-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index d64479fc14..f15cd6f2d5 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -435,10 +435,13 @@ def list_instances(creator_id=None): """ instance_signature = { - "id": "pyblish.avalon.instance", - "identifier": creator_id + "id": "pyblish.avalon.instance" } - return lib.lsattrs(instance_signature) + + return [ + i for i in lib.lsattrs(instance_signature) + if i.paramEval("creator_identifier") == creator_id + ] def remove_instance(instance): @@ -448,12 +451,8 @@ def 
remove_instance(instance): because it might contain valuable data for artist. """ - nodes = instance.get("members") - if not nodes: - return - # Assume instance node is first node - instance_node = hou.node(nodes[0]) + instance_node = hou.node(instance.data.get("instance_node")) to_delete = None for parameter in instance_node.spareParms(): if parameter.name() == "id" and \ diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 897696533f..fa56b2cb8d 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -133,7 +133,7 @@ class HoudiniCreator(NewCreator): # wondering if we'll ever need more than one member here # in Houdini - instance_data["members"] = [instance_node.path()] + # instance_data["members"] = [instance_node.path()] instance_data["instance_node"] = instance_node.path() instance = CreatedInstance( @@ -167,7 +167,12 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - for instance in list_instances(creator_id=self.identifier): + instances = [i for i in self.collection_shared_data.get( + "houdini_cached_instances", []) if i.paramEval("creator_identifier") == self.identifier] + if not instances: + print("not using cached instances") + instances = list_instances(creator_id=self.identifier) + for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self ) From 19d237323d628bd4e656bf379be30ef3f1df6be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 21 Oct 2022 11:07:23 +0200 Subject: [PATCH 050/409] :bug: fix multiple selection --- .../hosts/houdini/plugins/create/create_alembic_camera.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 183ab28b26..481c6bea77 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin -from openpype.pipeline import CreatedInstance +from openpype.pipeline import CreatedInstance, CreatorError class CreateAlembicCamera(plugin.HoudiniCreator): @@ -30,7 +30,9 @@ class CreateAlembicCamera(plugin.HoudiniCreator): } if self.selected_nodes: - path = self.selected_nodes.path() + if len(self.selected_nodes) > 1: + raise CreatorError("More than one item selected.") + path = self.selected_nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) From 694bc49305d015ee0e773895541e3850695dce2f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 14:16:16 +0200 Subject: [PATCH 051/409] :bug: fix caching --- openpype/hosts/houdini/api/plugin.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index fa56b2cb8d..679f7b0d0f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -167,11 +167,13 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - instances = [i for i in self.collection_shared_data.get( - "houdini_cached_instances", []) if 
i.paramEval("creator_identifier") == self.identifier] + cached_instances = self.collection_shared_data.get( + "houdini_cached_instances") + instances = cached_instances.get(self.identifier) if not instances: print("not using cached instances") instances = list_instances(creator_id=self.identifier) + self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self From 6ee68861a8bfa06f346c6f899bc26b5f8d29e670 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 14:40:33 +0200 Subject: [PATCH 052/409] :bug: fix missing keys --- openpype/hosts/houdini/api/plugin.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 679f7b0d0f..2a16b08908 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -168,11 +168,14 @@ class HoudiniCreator(NewCreator): def collect_instances(self): cached_instances = self.collection_shared_data.get( - "houdini_cached_instances") + "houdini_cached_instances", {}) instances = cached_instances.get(self.identifier) if not instances: - print("not using cached instances") instances = list_instances(creator_id=self.identifier) + if not self.collection_shared_data.get( + "houdini_cached_instances"): + self.collection_shared_data["houdini_cached_instances"] = {} + self.log.info("Caching instances for {}".format(self.identifier)) self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 for instance in instances: created_instance = CreatedInstance.from_existing( From 696dc78be74dc8d48da411335c5e906db4c669ef Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:26:03 +0200 Subject: [PATCH 053/409] =?UTF-8?q?=F0=9F=A5=85=20catch=20edge=20case=20da?= =?UTF-8?q?ta=20flow?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 3426040d65..ceb3b753e0 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -310,6 +310,9 @@ def imprint(node, data, update=False): """ if not data: return + if not node: + self.log.error("Node is not set, calling imprint on invalid data.") + return current_parms = {p.name(): p for p in node.spareParms()} update_parms = [] From 4fe053b109d892a5b5f3770be693ae72d1c19967 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:32:27 +0200 Subject: [PATCH 054/409] :recycle: refactor the use of `members` --- .../plugins/publish/collect_active_state.py | 3 ++- .../houdini/plugins/publish/collect_frames.py | 2 +- .../plugins/publish/collect_instances.py | 2 +- .../publish/collect_members_as_nodes.py | 21 ------------------- .../plugins/publish/collect_output_node.py | 2 +- .../plugins/publish/collect_redshift_rop.py | 2 +- .../publish/collect_render_products.py | 2 +- .../plugins/publish/collect_usd_layers.py | 6 ++++-- .../plugins/publish/extract_alembic.py | 4 +++- .../houdini/plugins/publish/extract_ass.py | 4 +++- .../plugins/publish/extract_composite.py | 4 +++- .../plugins/publish/extract_redshift_proxy.py | 4 +++- .../houdini/plugins/publish/extract_usd.py | 3 ++- .../plugins/publish/extract_usd_layered.py | 2 +- .../plugins/publish/extract_vdb_cache.py | 4 +++- 
.../validate_abc_primitive_to_detail.py | 17 +++++++-------- .../publish/validate_alembic_face_sets.py | 4 ++-- .../publish/validate_alembic_input_node.py | 3 ++- .../publish/validate_animation_settings.py | 3 ++- .../plugins/publish/validate_bypass.py | 3 ++- .../publish/validate_cop_output_node.py | 15 +++++++++++-- .../publish/validate_file_extension.py | 4 +++- .../plugins/publish/validate_frame_token.py | 3 ++- .../plugins/publish/validate_no_errors.py | 2 +- .../validate_primitive_hierarchy_paths.py | 14 ++++++------- .../publish/validate_sop_output_node.py | 2 +- .../validate_usd_layer_path_backslashes.py | 2 +- .../publish/validate_usd_model_and_shade.py | 4 +++- .../publish/validate_usd_output_node.py | 2 +- .../plugins/publish/validate_usd_setdress.py | 3 ++- .../publish/validate_usd_shade_workspace.py | 2 +- .../publish/validate_vdb_output_node.py | 2 +- 32 files changed, 81 insertions(+), 69 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index dd83721358..cc3f2e7fae 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -1,4 +1,5 @@ import pyblish.api +import hou class CollectInstanceActiveState(pyblish.api.InstancePlugin): @@ -24,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9108432384..531cdf1249 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -18,7 +18,7 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) frame_data = lib.get_frame_data(ropnode) instance.data.update(frame_data) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 0582ee154c..bb85630552 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -84,7 +84,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] - instance.data["members"] = [node] + instance.data["instance_node"] = node.path() instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py deleted file mode 100644 index 07d71c6605..0000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -import pyblish.api -import hou - - -class CollectMembersAsNodes(pyblish.api.InstancePlugin): - """Collects instance members as Houdini nodes.""" - - order = pyblish.api.CollectorOrder - 0.01 - hosts = ["houdini"] - label = "Collect Members as Nodes" - - def process(self, instance): - if not instance.data.get("creator_identifier"): - return - - nodes = [ - 
hou.node(member) for member in instance.data.get("members", []) - ] - - instance.data["members"] = nodes diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index a3989dc776..601ed17b39 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 33bf74610a..346bdf3421 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index e88c5ea0e6..fcd80e0082 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance.data["members"][0].path() + rop_path = instance.data["instance_node"].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index c21b336403..833add854b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -3,6 +3,8 @@ import os import pyblish.api import openpype.hosts.houdini.api.usd as usdlib +import hou + class CollectUsdLayers(pyblish.api.InstancePlugin): """Collect the USD Layers that have configured save paths.""" @@ -19,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance.data["members"][0] + rop_node = hou.node(instance.get("instance_node")) save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -55,7 +57,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] # include same USD ROP - layer_inst.append(instance.data["members"][0]) + layer_inst.append(rop_node) # include layer data layer_inst.append((layer, save_path)) diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 0ad7a5069f..cb2d4ef424 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAlembic(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index 864b8d5252..c6417ce18a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAss(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 1042dda8f0..7a1ab36b93 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -4,6 +4,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop, splitext +import hou + class ExtractComposite(publish.Extractor): @@ -14,7 +16,7 @@ class ExtractComposite(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py 
b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index 4d32b6f97e..29ede98a52 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractRedshiftProxy(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 4f471af597..cbeb5add71 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -5,6 +5,7 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou class ExtractUSD(publish.Extractor): @@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 7ce51c441b..0288b7363a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(publish.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 8a6d3b578a..434d6a2160 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractVDBCache(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 55c705c65b..86e92a052f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -32,19 +32,18 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - + import hou # noqa output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) if output_node is None: - node = instance.data["members"][0] 
cls.log.error( "SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." % node.path() + "Ensure a valid SOP output path is set." % rop_node.path() ) - return [node.path()] + return [rop_node.path()] - rop = instance.data["members"][0] - pattern = rop.parm("prim_to_detail_pattern").eval().strip() + pattern = rop_node.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( "Alembic ROP has no 'Primitive to Detail' pattern. " @@ -52,7 +51,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -60,14 +59,14 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] # Let's assume each attribute is explicitly named for now and has no # wildcards for Primitive to Detail. This simplifies the check. diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index 10681e4b72..44d58cfa36 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api - +import hou class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. @@ -24,7 +24,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.data["instance_node"]) facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 4355bc7921..bafb206bd3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline import PublishValidationError +import hou class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -33,7 +34,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): output_node = instance.data.get("output_node") if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 32c5078b9f..f11f9c0c62 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateAnimationSettings(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 59ab2d2b1b..1bf51a986c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.pipeline import PublishValidationError +import hou class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. @@ -36,6 +37,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 2e99e5fb41..600dad8161 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +import sys import pyblish.api +import six from openpype.pipeline import PublishValidationError @@ -34,10 +36,19 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + try: + output_node = instance.data["output_node"] + except KeyError as e: + six.reraise( + PublishValidationError, + PublishValidationError( + "Can't determine COP output node.", + title=cls.__name__), + sys.exc_info()[2] + ) if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index 5211cdb919..4584e78f4f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline import PublishValidationError +import hou + class ValidateFileExtension(pyblish.api.InstancePlugin): """Validate the output file extension fits the output family. 
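# A minimal sketch of the node lookup pattern this commit standardizes on,
# assuming an instance dict that carries the ROP path under "instance_node"
# (the path used below is a hypothetical example):
import hou

instance_data = {"instance_node": "/out/pointcacheMain"}
rop_node = hou.node(instance_data["instance_node"])
if rop_node is not None:
    print(rop_node.path(), rop_node.type().name())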
@@ -40,7 +42,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): def get_invalid(cls, instance): # Get ROP node from instance - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) # Create lookup for current family in instance families = [] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index b65e9ef62e..b5f6ba71e1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateFrameToken(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index fd396ad8c9..f7c95aaf4e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -38,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance.data["members"][0]) + validate_nodes.append(hou.node(instance.get("instance_node"))) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index e1f1dc116e..d3a4c0cfbf 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder from openpype.pipeline import PublishValidationError +import hou class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -30,18 +31,17 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def get_invalid(cls, instance): output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) if output_node is None: - node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." % node.path() + "Ensure a valid SOP output path is set." % rop_node.path() ) - return [node.path()] + return [rop_node.path()] - rop = instance.data["members"][0] - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -49,14 +49,14 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." 
) - return [rop.path()] + return [rop_node.path()] cls.log.debug("Checking for attribute: %s" % path_attr) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a1a96120e2..ed7f438729 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -37,7 +37,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data.get("output_node") if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 3e593a9508..972ac59f49 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -26,7 +26,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 3ca0fd0298..a55eb70cb2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -6,6 +6,8 @@ from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux +import hou + def fullname(o): """Get fully qualified class name""" @@ -38,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 9a4d292778..af21efcafc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -36,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 89ae8b8ad9..01ebc0e828 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -22,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): def process(self, instance): from pxr import UsdGeom + import hou - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 2ff2702061..bd3366a424 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -20,7 +20,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index a9f8b38e7e..61c1209fc9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -38,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." % instance.data["members"][0].path() + "ROP node '%s'." 
% instance.get("instance_node") ) return [instance] From d6826524949c471472d0b655931b78f44bdb55e2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:33:16 +0200 Subject: [PATCH 055/409] :recycle: absolute paths by default --- .../houdini/plugins/create/create_alembic_camera.py | 3 ++- .../hosts/houdini/plugins/create/create_arnold_ass.py | 11 +++++------ .../hosts/houdini/plugins/create/create_composite.py | 8 +++++++- .../hosts/houdini/plugins/create/create_pointcache.py | 9 ++++++++- 4 files changed, 22 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 481c6bea77..fec64eb4a1 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -25,7 +25,8 @@ class CreateAlembicCamera(plugin.HoudiniCreator): instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/{}.abc".format(subset_name), + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)), "use_sop_path": False, } diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 40b253d1aa..8b310753d0 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -28,23 +28,22 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance_node = hou.node(instance.get("instance_node")) - basename = instance_node.name() - instance_node.setName(basename + "_ASS", unique_name=True) - # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) instance_node.setParmTemplateGroup(parm_template_group) - filepath = "$HIP/pyblish/{}.$F4{}".format(subset_name, self.ext) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { # Render frame range "trange": 1, # Arnold ROP settings "ar_ass_file": filepath, - "ar_ass_export_enable": 1, - "filename": filepath + "ar_ass_export_enable": 1 } instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 1a9c56571a..45af2b0630 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -12,6 +12,8 @@ class CreateCompositeSequence(plugin.HoudiniCreator): family = "imagesequence" icon = "gears" + ext = ".exr" + def create(self, subset_name, instance_data, pre_create_data): import hou # noqa @@ -24,8 +26,12 @@ class CreateCompositeSequence(plugin.HoudiniCreator): pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) - filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { + "trange": 1, "copoutput": filepath } diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 124936d285..6b6b277422 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -30,12 
+30,19 @@ class CreatePointCache(plugin.HoudiniCreator): "prim_to_detail_pattern": "cbId", "format": 2, "facesets": 0, - "filename": "$HIP/pyblish/{}.abc".format(subset_name) + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)) } if self.selected_nodes: parms["sop_path"] = self.selected_nodes[0].path() + # try to find output node + for child in self.selected_nodes[0].children(): + if child.type().name() == "output": + parms["sop_path"] = child.path() + break + instance_node.setParms(parms) instance_node.parm("trange").set(1) From 822f8f4bbc60c419e5f46fc7b4e7f205291951d9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:33:42 +0200 Subject: [PATCH 056/409] :art: check for missing files --- openpype/hosts/houdini/plugins/publish/extract_ass.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index c6417ce18a..0d246625ba 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -35,8 +35,12 @@ class ExtractAss(publish.Extractor): # error and thus still continues to the integrator. To capture that # we make sure all files exist files = instance.data["frames"] - missing = [fname for fname in files - if not os.path.exists(os.path.join(staging_dir, fname))] + missing = [] + for file_name in files: + full_path = os.path.normpath(os.path.join(staging_dir, file_name)) + if not os.path.exists(full_path): + missing.append(full_path) + if missing: raise RuntimeError("Failed to complete Arnold ass extraction. " "Missing output files: {}".format(missing)) From 0e0920336b9d821857d0128101df82759f3f7ae3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:34:06 +0200 Subject: [PATCH 057/409] =?UTF-8?q?=F0=9F=A9=B9=20parameter=20access?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/pipeline.py | 2 +- openpype/hosts/houdini/api/plugin.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f15cd6f2d5..689d4d711c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -440,7 +440,7 @@ def list_instances(creator_id=None): return [ i for i in lib.lsattrs(instance_signature) - if i.paramEval("creator_identifier") == creator_id + if i.parm("creator_identifier").eval() == creator_id ] diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2a16b08908..560aeec6ea 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -131,11 +131,7 @@ class HoudiniCreator(NewCreator): instance_node = self._create_instance_node( subset_name, "/out", node_type) - # wondering if we'll ever need more than one member here - # in Houdini - # instance_data["members"] = [instance_node.path()] instance_data["instance_node"] = instance_node.path() - instance = CreatedInstance( self.family, subset_name, From f4b92f4d1daa67243369440aa6a4339c6c646f1b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:51:30 +0200 Subject: [PATCH 058/409] :art: improve imprinting --- openpype/hosts/houdini/api/lib.py | 10 ++++++---- openpype/hosts/houdini/api/plugin.py | 9 +++++---- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git 
a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ceb3b753e0..2452ceef62 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -324,14 +324,16 @@ def imprint(node, data, update=False): parm = get_template_from_value(key, value) - if key in current_parms.keys(): + if key in current_parms: + if node.evalParm(key) == data[key]: + continue if not update: - log.debug("{} already exists on {}".format(key, node)) + log.debug(f"{key} already exists on {node}") else: - log.debug("replacing {}".format(key)) + log.debug(f"replacing {key}") update_parms.append(parm) continue - # parm.hide(True) + templates.append(parm) parm_group = node.parmTemplateGroup() diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 560aeec6ea..51476fef52 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -184,12 +184,13 @@ class HoudiniCreator(NewCreator): instance_node = hou.node(created_inst.get("instance_node")) current_data = read(instance_node) + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } imprint( instance_node, - { - key: value[1] for key, value in _changes.items() - if current_data.get(key) != value[1] - }, + new_values, update=True ) From 021800d1dd72fe65039c2bf427e67b76fdc239f6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:52:00 +0200 Subject: [PATCH 059/409] :coffin: remove unused code --- .../hosts/houdini/hooks/set_operators_path.py | 25 ------------------ openpype/hosts/houdini/otls/OpenPype.hda | Bin 8238 -> 0 bytes 2 files changed, 25 deletions(-) delete mode 100644 openpype/hosts/houdini/hooks/set_operators_path.py delete mode 100644 openpype/hosts/houdini/otls/OpenPype.hda diff --git a/openpype/hosts/houdini/hooks/set_operators_path.py b/openpype/hosts/houdini/hooks/set_operators_path.py deleted file mode 100644 index 6f26baaa78..0000000000 --- a/openpype/hosts/houdini/hooks/set_operators_path.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -from openpype.lib import PreLaunchHook -import os - - -class SetOperatorsPath(PreLaunchHook): - """Set path to OpenPype assets folder.""" - - app_groups = ["houdini"] - - def execute(self): - hou_path = self.launch_context.env.get("HOUDINIPATH") - - openpype_assets = os.path.join( - os.getenv("OPENPYPE_REPOS_ROOT"), - "openpype", "hosts", "houdini", "hda" - ) - - if not hou_path: - self.launch_context.env["HOUDINIPATH"] = openpype_assets - return - - self.launch_context.env["HOUDINIPATH"] = "{}{}{}".format( - hou_path, os.pathsep, openpype_assets - ) diff --git a/openpype/hosts/houdini/otls/OpenPype.hda b/openpype/hosts/houdini/otls/OpenPype.hda deleted file mode 100644 index b34418d422b69282353dc134b1c4855e377c1039..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8238 zcmcgx?`{)E5O)fq!ceJHszg;pmjj7bs$&9tK*0%eY=@*xYzsR92_frzx3(9bcTc-} zi38#R`WF4rZ@d7{)fZ@Ib}x?4PMkQ{wiLyloj<>s{W~+;<>H&v$>$1u{cgKlEWK&e zN`?A%r5ulaX;wS`!uKCKBJvq$%N^ehSW~+42&i9>E9SUeX}+hP&U%u%nl?hgxb|GH zLoMIk|6;x+__-ghnQ!maM0DCufw`+w) zc%(am!_VW-H7j!bbM(Ijy#%2d4%fH9cC*ObK(uR~WTCcVw~Mil>8deP5Tct(-7ey2 zJn~~5oU2L^QmGkLl~6Omm1SC5j+w4*(I8Bvev(6iH|jzJYFTw?(6U2Ut^xaJV7a*& zaS!#B-5x~yP9JEuVpZRl`dYf1ET98Zcm7JHmj1@^`^5S{lyQQzgd}6LLflA;o~xPX z2Eh?&Q%)sJu%AwUOcVHUFnWDV$_!bxXAA~zlLptF2@~$5jTZ1YBp=h)9mo9qWT|Z_ zA|xXO{2&bc?)~GATMRpOAeI3!l@zP7rB76jB2a!RcV&)8N}A$Z|@^ zuY`tKJ`FDy#;<`@^z?Kez5=dJ#^?g!4FGOZXy%|sCT=n)8^AduQc3-j5!GM^&pSln 
zG=Qq?Kxkri$UV;R1S1QXP)Qs8IY;ZG2O2ZmyJcaql~%1PCglxxP@$z?qAlf>(=!1qLNs_jxhAyNP- z$`xG5g3lWzJWb&B0534r7&SI|0hG84_bFf&(sE~To=lU^Hdao64wB1S)Z}z% z`UkUj;dIuh8d81!18f=?C+@aZKY_;f)4wa-)-xJT1KES@GZSpI`GhLbVta>{(sensI#L>jgxJV2%i zWW@;COnf}oJ3XRbfiW((0Z4d|O_j3k+d>^NsSu=UNhfCxG-O_PEE$}9@a!{sXo_?- z8i02oO>8nWQZSqgR$FjQ24yl_ixMgcSjA2X&Kx0j1E!3oDg4LJ;)N~GNYMr)=fP;I za9$)edCeqk;rkCV-!bu-$8&m&v&9E5mrvUU!7F>vn08w%jPtF_Y1;DMfF; zWe_}io`%X(i}j1pX9=l~+NVdAz2H6rUC^3+186b-;s=PkBIJ2;)c+9^Grqq zddfmmu+ecf(T8Fb1oA5kDV%_apFpULL1-X}L(s{1nz%%P4R8hhStgmxI<{VNh}-m8 z)|>}h#eAb!+RX3m)Eo6mWyi4%m3U+)zfl4bgHXhj?LwvOV6b96yOc*}@$_}9@&FEJ zXunt<;KDFMkEKjCuFv(##vi%t2+gX?BCa8Q6Rp5&{7}g5n3+mwtQf!Q`Hh`YBVR5y z%K6>Wz-r8Lu2FdNLu8}%B5N}Z`!25()hcIT9*GTL;+4TOHR|k$?yN9l9!}5A)+(9QE{`T(OdM;}%nKf(Rz-rEp zEa$OqAv7$%N&SLXCzV^UBm)W+NxWCn~DxWr^b_1jAtUekt0V_L%HcEnK0m%mAKJ%z@ yR^PsZqcL^YdQ`(s+F1_$gAOU=NcdwZ33n_h;f*Cta^_fQ#1~6WxME4Cd-6X`t From 4ec0035ed593dd626d350f1c0fec768b176abf5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:56:18 +0200 Subject: [PATCH 060/409] =?UTF-8?q?=F0=9F=A6=AE=20hound=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/plugin.py | 3 +-- .../hosts/houdini/plugins/publish/validate_cop_output_node.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 ++ 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 51476fef52..95e7add54f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -172,7 +172,7 @@ class HoudiniCreator(NewCreator): "houdini_cached_instances"): self.collection_shared_data["houdini_cached_instances"] = {} self.log.info("Caching instances for {}".format(self.identifier)) - self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 + self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E501 for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self @@ -182,7 +182,6 @@ class HoudiniCreator(NewCreator): def update_instances(self, update_list): for created_inst, _changes in update_list: instance_node = hou.node(created_inst.get("instance_node")) - current_data = read(instance_node) new_values = { key: new_value diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 600dad8161..1d0377c818 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -38,7 +38,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): try: output_node = instance.data["output_node"] - except KeyError as e: + except KeyError: six.reraise( PublishValidationError, PublishValidationError( diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 972ac59f49..a0e2302495 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -4,6 +4,8 @@ import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.pipeline import PublishValidationError +import hou + class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): 
"""Validate USD loaded paths have no backslashes. From e57b932cf835887726e4711003b7459a0319540a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 18:09:55 +0200 Subject: [PATCH 061/409] :recycle: move methods around --- openpype/hosts/houdini/api/pipeline.py | 28 -------------------------- openpype/hosts/houdini/api/plugin.py | 24 ++++++++++++++++++---- 2 files changed, 20 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 689d4d711c..c1a5936415 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -430,32 +430,4 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): log.warning("%s - %s", instance_node.path(), exc) -def list_instances(creator_id=None): - """List all publish instances in the scene. - """ - instance_signature = { - "id": "pyblish.avalon.instance" - } - - return [ - i for i in lib.lsattrs(instance_signature) - if i.parm("creator_identifier").eval() == creator_id - ] - - -def remove_instance(instance): - """Remove specified instance from the scene. - - This is only removing `id` parameter so instance is no longer instance, - because it might contain valuable data for artist. - - """ - # Assume instance node is first node - instance_node = hou.node(instance.data.get("instance_node")) - to_delete = None - for parameter in instance_node.spareParms(): - if parameter.name() == "id" and \ - parameter.eval() == "pyblish.avalon.instance": - to_delete = parameter - instance_node.removeSpareParmTuple(to_delete) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 95e7add54f..ee508f0df4 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -13,8 +13,7 @@ from openpype.pipeline import ( CreatedInstance ) from openpype.lib import BoolDef -from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read +from .lib import imprint, read, lsattr class OpenPypeCreatorError(CreatorError): @@ -167,7 +166,11 @@ class HoudiniCreator(NewCreator): "houdini_cached_instances", {}) instances = cached_instances.get(self.identifier) if not instances: - instances = list_instances(creator_id=self.identifier) + instances = [ + i for i in lsattr("id", "pyblish.avalon.instance") + if i.parm("creator_identifier").eval() == self.identifier + ] + if not self.collection_shared_data.get( "houdini_cached_instances"): self.collection_shared_data["houdini_cached_instances"] = {} @@ -194,8 +197,21 @@ class HoudiniCreator(NewCreator): ) def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, + because it might contain valuable data for artist. 
+ + """ for instance in instances: - remove_instance(instance) + instance_node = hou.node(instance.data.get("instance_node")) + to_delete = None + for parameter in instance_node.spareParms(): + if parameter.name() == "id" and \ + parameter.eval() == "pyblish.avalon.instance": + to_delete = parameter + instance_node.removeSpareParmTuple(to_delete) self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): From 7b5abe1770bc2736f0b8f09998b8a85889274e5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 18:11:44 +0200 Subject: [PATCH 062/409] :rotating_light: remove empty lines --- openpype/hosts/houdini/api/pipeline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index c1a5936415..88c9029141 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -428,6 +428,3 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): instance_node.bypass(not new_value) except hou.PermissionError as exc: log.warning("%s - %s", instance_node.path(), exc) - - - From 7a2e6bdf780f50d2680edf770955ae2db1cff1cd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 24 Oct 2022 00:10:04 +0200 Subject: [PATCH 063/409] :bug: fix caching --- openpype/hosts/houdini/api/__init__.py | 6 +----- openpype/hosts/houdini/api/plugin.py | 29 +++++++++++++------------- 2 files changed, 16 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index f29df021e1..2663a55f6f 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,9 +1,7 @@ from .pipeline import ( HoudiniHost, ls, - containerise, - list_instances, - remove_instance + containerise ) from .plugin import ( @@ -24,8 +22,6 @@ __all__ = [ "ls", "containerise", - "list_instances", - "remove_instance", "Creator", diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ee508f0df4..b7eda7f635 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -162,21 +162,22 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - cached_instances = self.collection_shared_data.get( - "houdini_cached_instances", {}) - instances = cached_instances.get(self.identifier) - if not instances: - instances = [ - i for i in lsattr("id", "pyblish.avalon.instance") - if i.parm("creator_identifier").eval() == self.identifier - ] + # cache instances if missing + if self.collection_shared_data.get("houdini_cached_instances") is None: + self.log.info("Caching instances ...") + self.collection_shared_data["houdini_cached_instances"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.parm("creator_identifier").eval() + if creator_id not in self.collection_shared_data[ + "houdini_cached_instances"]: + self.collection_shared_data["houdini_cached_instances"][ + creator_id] = [i] + else: + self.collection_shared_data["houdini_cached_instances"][ + creator_id].append(i) - if not self.collection_shared_data.get( - "houdini_cached_instances"): - self.collection_shared_data["houdini_cached_instances"] = {} - self.log.info("Caching instances for {}".format(self.identifier)) - self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E501 - for instance in instances: + for instance in 
self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self ) From c27f4cbbf4b671980759d8ae520b2fc724deb9cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 14:48:30 +0200 Subject: [PATCH 064/409] :art: workfile auto-creator --- openpype/hosts/houdini/api/plugin.py | 56 +++++++++----- .../houdini/plugins/create/create_workfile.py | 76 +++++++++++++++++++ .../plugins/publish/collect_current_file.py | 38 +++------- 3 files changed, 124 insertions(+), 46 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/create/create_workfile.py diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index b7eda7f635..aae6d137ac 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -35,6 +35,9 @@ class Creator(LegacyCreator): when hovering over a node. The information is visible under the name of the node. + Deprecated: + This creator is deprecated and will be removed in future version. + """ defaults = ['Main'] @@ -91,12 +94,35 @@ class Creator(LegacyCreator): sys.exc_info()[2]) -@six.add_metaclass(ABCMeta) -class HoudiniCreator(NewCreator): - selected_nodes = [] +class HoudiniCreatorBase(object): + @staticmethod + def cache_instances(shared_data): + """Cache instances for Creators to shared data. + + Create `houdini_cached_instances` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + Args: + Dict[str, Any]: Shared data. + + Return: + Dict[str, Any]: Shared data dictionary. + + """ + if shared_data.get("houdini_cached_instances") is None: + shared_data["houdini_cached_instances"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.parm("creator_identifier").eval() + if creator_id not in shared_data["houdini_cached_instances"]: + shared_data["houdini_cached_instances"][creator_id] = [i] + else: + shared_data["houdini_cached_instances"][creator_id].append(i) # noqa + return shared_data @staticmethod - def _create_instance_node( + def create_instance_node( node_name, parent, node_type="geometry"): # type: (str, str, str) -> hou.Node @@ -117,6 +143,11 @@ class HoudiniCreator(NewCreator): instance_node.moveToGoodPosition() return instance_node + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator, HoudiniCreatorBase): + selected_nodes = [] + def create(self, subset_name, instance_data, pre_create_data): try: if pre_create_data.get("use_selection"): @@ -127,7 +158,7 @@ class HoudiniCreator(NewCreator): if node_type is None: node_type = "geometry" - instance_node = self._create_instance_node( + instance_node = self.create_instance_node( subset_name, "/out", node_type) instance_data["instance_node"] = instance_node.path() @@ -163,20 +194,7 @@ class HoudiniCreator(NewCreator): def collect_instances(self): # cache instances if missing - if self.collection_shared_data.get("houdini_cached_instances") is None: - self.log.info("Caching instances ...") - self.collection_shared_data["houdini_cached_instances"] = {} - cached_instances = lsattr("id", "pyblish.avalon.instance") - for i in cached_instances: - creator_id = i.parm("creator_identifier").eval() - if creator_id not in self.collection_shared_data[ - "houdini_cached_instances"]: - self.collection_shared_data["houdini_cached_instances"][ - creator_id] = [i] - else: - 
self.collection_shared_data["houdini_cached_instances"][ - creator_id].append(i) - + self.cache_instances(self.collection_shared_data) for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py new file mode 100644 index 0000000000..2a7cb14d68 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api.lib import read +from openpype.pipeline import CreatedInstance, AutoCreator +from openpype.pipeline.legacy_io import Session +from openpype.client import get_asset_by_name + + +class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.houdini.workfile" + label = "Workfile" + family = "workfile" + icon = "gears" + + default_variant = "Main" + + def create(self): + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + project_name = self.project_name + asset_name = Session["AVALON_ASSET"] + task_name = Session["AVALON_TASK"] + host_name = Session["AVALON_APP"] + + if current_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update( + self.get_dynamic_data( + variant, task_name, asset_doc, + project_name, host_name, current_instance) + ) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + + # Update instance context if is not the same + elif ( + current_instance["asset"] != asset_name + or current_instance["task"] != task_name + ): + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + current_instance["asset"] = asset_name + current_instance["task"] = task_name + current_instance["subset"] = subset_name + + def collect_instances(self): + self.cache_instances(self.collection_shared_data) + for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + pass + diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index 1383c274a2..9cca07fdc7 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -5,19 +5,20 @@ from openpype.pipeline import legacy_io import pyblish.api -class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): +class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): """Inject the current working file into context""" order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] + family = ["workfile"] - def process(self, context): + def 
process(self, instance): """Inject the current working file""" current_file = hou.hipFile.path() if not os.path.exists(current_file): - # By default Houdini will even point a new scene to a path. + # By default, Houdini will even point a new scene to a path. # However if the file is not saved at all and does not exist, # we assume the user never set it. filepath = "" @@ -34,43 +35,26 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): "saved correctly." ) - context.data["currentFile"] = current_file + instance.context.data["currentFile"] = current_file folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = legacy_io.Session["AVALON_TASK"] - - data = {} - - # create instance - instance = context.create_instance(name=filename) - subset = 'workfile' + task.capitalize() - - data.update({ - "subset": subset, - "asset": os.getenv("AVALON_ASSET", None), - "label": subset, - "publish": True, - "family": 'workfile', - "families": ['workfile'], + instance.data.update({ "setMembers": [current_file], - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'] + "frameStart": instance.context.data['frameStart'], + "frameEnd": instance.context.data['frameEnd'], + "handleStart": instance.context.data['handleStart'], + "handleEnd": instance.context.data['handleEnd'] }) - data['representations'] = [{ + instance.data['representations'] = [{ 'name': ext.lstrip("."), 'ext': ext.lstrip("."), 'files': file, "stagingDir": folder, }] - instance.data.update(data) - self.log.info('Collected instance: {}'.format(file)) self.log.info('Scene path: {}'.format(current_file)) self.log.info('staging Dir: {}'.format(folder)) - self.log.info('subset: {}'.format(subset)) From 5b154d7a19d66f2e6d5b4f8567f38b441eae9066 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 15:00:17 +0200 Subject: [PATCH 065/409] :bug: fix HDA creation --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_hda.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 67c05b1634..5bb5786a40 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -43,7 +43,7 @@ class CreateHDA(plugin.HoudiniCreator): # if we have `use selection` enabled, and we have some # selected nodes ... 
subnet = parent_node.collapseIntoSubnet( - self._nodes, + self.selected_nodes, subnet_name="{}_subnet".format(node_name)) subnet.moveToGoodPosition() to_hda = subnet diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index a92d000457..8b97bf364f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- import os - from pprint import pformat - import pyblish.api - from openpype.pipeline import publish +import hou class ExtractHDA(publish.Extractor): @@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance.data.get("members")[0] + hda_node = hou.node(instance.data.get("instance_node")) hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) From a8f1e95696b005cb8466e67ab67d176ac60b1f2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 18:11:06 +0200 Subject: [PATCH 066/409] :bug: workfile instance changes are now persisted --- openpype/hosts/houdini/api/pipeline.py | 8 +-- .../houdini/plugins/create/create_workfile.py | 55 ++++++++++++------- 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 88c9029141..6106dd4a6f 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -136,7 +136,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): ) @staticmethod - def _create_context_node(): + def create_context_node(): """Helper for creating context holding node. 
Returns: @@ -151,20 +151,20 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx.setCreatorState("OpenPype") op_ctx.setComment("OpenPype node to hold context metadata") op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) - op_ctx.hide(True) + # op_ctx.hide(True) return op_ctx def update_context_data(self, data, changes): op_ctx = hou.node(CONTEXT_CONTAINER) if not op_ctx: - op_ctx = self._create_context_node() + op_ctx = self.create_context_node() lib.imprint(op_ctx, data) def get_context_data(self): op_ctx = hou.node(CONTEXT_CONTAINER) if not op_ctx: - op_ctx = self._create_context_node() + op_ctx = self.create_context_node() return lib.read(op_ctx) def save_file(self, dst_path=None): diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 2a7cb14d68..0c6d840810 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -1,10 +1,12 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api.lib import read +from openpype.hosts.houdini.api.lib import read, imprint +from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER from openpype.pipeline import CreatedInstance, AutoCreator -from openpype.pipeline.legacy_io import Session +from openpype.pipeline import legacy_io from openpype.client import get_asset_by_name +import hou class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): @@ -12,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = "io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "gears" + icon = "document" default_variant = "Main" @@ -25,9 +27,9 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): ), None) project_name = self.project_name - asset_name = Session["AVALON_ASSET"] - task_name = Session["AVALON_TASK"] - host_name = Session["AVALON_APP"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if current_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) @@ -44,17 +46,16 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): variant, task_name, asset_doc, project_name, host_name, current_instance) ) - - new_instance = CreatedInstance( + self.log.info("Auto-creating workfile instance...") + current_instance = CreatedInstance( self.family, subset_name, data, self ) - self._add_instance_to_context(new_instance) - - # Update instance context if is not the same + self._add_instance_to_context(current_instance) elif ( current_instance["asset"] != asset_name or current_instance["task"] != task_name ): + # Update instance context if is not the same asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name @@ -63,14 +64,30 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): current_instance["task"] = task_name current_instance["subset"] = subset_name + # write workfile information to context container. 
+ op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + + workfile_data = {"workfile": current_instance.data_to_store()} + imprint(op_ctx, workfile_data) + def collect_instances(self): - self.cache_instances(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa - created_instance = CreatedInstance.from_existing( - read(instance), self - ) - self._add_instance_to_context(created_instance) + op_ctx = hou.node(CONTEXT_CONTAINER) + instance = read(op_ctx) + if not instance: + return + workfile = instance.get("workfile") + if not workfile: + return + created_instance = CreatedInstance.from_existing( + workfile, self + ) + self._add_instance_to_context(created_instance) def update_instances(self, update_list): - pass - + op_ctx = hou.node(CONTEXT_CONTAINER) + for created_inst, _changes in update_list: + if created_inst["creator_identifier"] == self.identifier: + workfile_data = {"workfile": created_inst.data_to_store()} + imprint(op_ctx, workfile_data, update=True) From 051189bbca25f08fa1a1403809e92b0a80d49e18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:36:09 +0200 Subject: [PATCH 067/409] :bug: fix creator id --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 5bb5786a40..590c8f97fd 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -11,7 +11,7 @@ from openpype.hosts.houdini.api import plugin class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - identifier = "hda" + identifier = "io.openpype.creators.houdini.hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" From 6db2c8e33f78d2e6751665c3e22bb8c91b4329ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:36:54 +0200 Subject: [PATCH 068/409] :recycle: refactor name, collect legacy subsets --- openpype/hosts/houdini/api/plugin.py | 31 ++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index aae6d137ac..4dc6641ac9 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -96,13 +96,15 @@ class Creator(LegacyCreator): class HoudiniCreatorBase(object): @staticmethod - def cache_instances(shared_data): + def cache_subsets(shared_data): """Cache instances for Creators to shared data. - Create `houdini_cached_instances` key when needed in shared data and + Create `houdini_cached_subsets` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. + U + Args: Dict[str, Any]: Shared data. @@ -110,15 +112,26 @@ class HoudiniCreatorBase(object): Dict[str, Any]: Shared data dictionary. 
""" - if shared_data.get("houdini_cached_instances") is None: - shared_data["houdini_cached_instances"] = {} + if shared_data.get("houdini_cached_subsets") is None: + shared_data["houdini_cached_subsets"] = {} + if shared_data.get("houdini_cached_legacy_subsets") is None: + shared_data["houdini_cached_legacy_subsets"] = {} cached_instances = lsattr("id", "pyblish.avalon.instance") for i in cached_instances: + if not i.parm("creator_identifier"): + # we have legacy instance + family = i.parm("family").eval() + if family not in shared_data["houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][family] = [i] + else: + shared_data["houdini_cached_legacy_subsets"][family].append(i) + continue + creator_id = i.parm("creator_identifier").eval() - if creator_id not in shared_data["houdini_cached_instances"]: - shared_data["houdini_cached_instances"][creator_id] = [i] + if creator_id not in shared_data["houdini_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] else: - shared_data["houdini_cached_instances"][creator_id].append(i) # noqa + shared_data["houdini_cached_subsets"][creator_id].append(i) # noqa return shared_data @staticmethod @@ -194,8 +207,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing - self.cache_instances(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data["houdini_cached_subsets"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self ) From 0fa86d5ce4fd772dfa37fb54eea1dc438680a471 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:37:15 +0200 Subject: [PATCH 069/409] :bug: fix lost pointer issue --- openpype/hosts/houdini/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 2452ceef62..13f5a62ec3 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -348,6 +348,9 @@ def imprint(node, data, update=False): else: for template in templates: parm_group.appendToFolder(parm_folder, template) + # this is needed because the pointer to folder + # is for some reason lost every call to `appendToFolder()` + parm_folder = parm_group.findFolder("Extra") node.setParmTemplateGroup(parm_group) From 1dcd49576b1c98d200c494fe4cd8658468bca4d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:37:37 +0200 Subject: [PATCH 070/409] :bug: hide context node by default --- openpype/hosts/houdini/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 6106dd4a6f..b0791fcb6c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -151,7 +151,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx.setCreatorState("OpenPype") op_ctx.setComment("OpenPype node to hold context metadata") op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) - # op_ctx.hide(True) + op_ctx.hide(True) return op_ctx def update_context_data(self, data, changes): From 20d111d60a1c0ac431adfc8567eeac87679b144a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:38:02 +0200 Subject: 
[PATCH 071/409] :sparkles: add legacy subset converter --- .../houdini/plugins/create/convert_legacy.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/create/convert_legacy.py diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py new file mode 100644 index 0000000000..be7ef714ba --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.houdini.api.lib import imprint + + +class HoudiniLegacyConvertor(SubsetConvertorPlugin): + identifier = "io.openpype.creators.houdini.legacy" + family_to_id = { + "camera": "io.openpype.creators.houdini.camera", + "ass": "io.openpype.creators.houdini.ass", + "imagesequence": "io.openpype.creators.houdini.imagesequence", + "hda": "io.openpype.creators.houdini.hda", + "pointcache": "io.openpype.creators.houdini.pointcache", + "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy", + "redshift_rop": "io.openpype.creators.houdini.redshift_rop", + "usd": "io.openpype.creators.houdini.usd", + "usdrender": "io.openpype.creators.houdini.usdrender", + "vdbcache": "io.openpype.creators.houdini.vdbcache" + } + + def __init__(self, *args, **kwargs): + super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs) + self.legacy_subsets = {} + + def find_instances(self): + self.legacy_subsets = self.collection_shared_data.get( + "houdini_cached_legacy_subsets") + if not self.legacy_subsets: + return + self.add_convertor_item("Found {} incompatible subset{}.".format( + len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "") + ) + + def convert(self): + if not self.legacy_subsets: + return + + for family, subsets in self.legacy_subsets.items(): + if family in self.family_to_id: + for subset in subsets: + data = { + "creator_identifier": self.family_to_id[family], + "instance_node": subset.path() + } + print("Converting {} to {}".format( + subset.path(), self.family_to_id[family])) + imprint(subset, data) From 8a1040aa7495aa6c3578033c5f6bad0321ec209d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 18:26:36 +0200 Subject: [PATCH 072/409] :rotating_light: various :dog: fixes and docstrings --- openpype/hosts/houdini/api/plugin.py | 27 ++++++++++++------- .../houdini/plugins/create/convert_legacy.py | 27 +++++++++++++++++++ 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 4dc6641ac9..b5f79838d1 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -103,7 +103,9 @@ class HoudiniCreatorBase(object): fill it with all collected instances from the scene under its respective creator identifiers. - U + If legacy instances are detected in the scene, create + `houdini_cached_legacy_subsets` there and fill it with + all legacy subsets under family as a key. Args: Dict[str, Any]: Shared data. 
@@ -121,17 +123,21 @@ class HoudiniCreatorBase(object): if not i.parm("creator_identifier"): # we have legacy instance family = i.parm("family").eval() - if family not in shared_data["houdini_cached_legacy_subsets"]: - shared_data["houdini_cached_legacy_subsets"][family] = [i] + if family not in shared_data[ + "houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][ + family] = [i] else: - shared_data["houdini_cached_legacy_subsets"][family].append(i) + shared_data[ + "houdini_cached_legacy_subsets"][family].append(i) continue creator_id = i.parm("creator_identifier").eval() if creator_id not in shared_data["houdini_cached_subsets"]: shared_data["houdini_cached_subsets"][creator_id] = [i] else: - shared_data["houdini_cached_subsets"][creator_id].append(i) # noqa + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa return shared_data @staticmethod @@ -159,6 +165,7 @@ class HoudiniCreatorBase(object): @six.add_metaclass(ABCMeta) class HoudiniCreator(NewCreator, HoudiniCreatorBase): + """Base class for most of the Houdini creator plugins.""" selected_nodes = [] def create(self, subset_name, instance_data, pre_create_data): @@ -208,7 +215,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing self.cache_subsets(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_subsets"].get(self.identifier, []): # noqa + for instance in self.collection_shared_data[ + "houdini_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing( read(instance), self ) @@ -231,11 +239,10 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def remove_instances(self, instances): """Remove specified instance from the scene. - This is only removing `id` parameter so instance is no longer - instance, - because it might contain valuable data for artist. + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. - """ + """ for instance in instances: instance_node = hou.node(instance.data.get("instance_node")) to_delete = None diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py index be7ef714ba..2f3d1ef708 100644 --- a/openpype/hosts/houdini/plugins/create/convert_legacy.py +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -1,9 +1,22 @@ # -*- coding: utf-8 -*- +"""Convertor for legacy Houdini subsets.""" from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin from openpype.hosts.houdini.api.lib import imprint class HoudiniLegacyConvertor(SubsetConvertorPlugin): + """Find and convert any legacy subsets in the scene. + + This Convertor will find all legacy subsets in the scene and will + transform them to the current system. Since the old subsets doesn't + retain any information about their original creators, the only mapping + we can do is based on their families. + + Its limitation is that you can have multiple creators creating subset + of the same family and there is no way to handle it. This code should + nevertheless cover all creators that came with OpenPype. + + """ identifier = "io.openpype.creators.houdini.legacy" family_to_id = { "camera": "io.openpype.creators.houdini.camera", @@ -23,6 +36,15 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): self.legacy_subsets = {} def find_instances(self): + """Find legacy subsets in the scene. 
+ + Legacy subsets are the ones that doesn't have `creator_identifier` + parameter on them. + + This is using cached entries done in + :py:meth:`~HoudiniCreatorBase.cache_subsets()` + + """ self.legacy_subsets = self.collection_shared_data.get( "houdini_cached_legacy_subsets") if not self.legacy_subsets: @@ -32,6 +54,11 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): ) def convert(self): + """Convert all legacy subsets to current. + + It is enough to add `creator_identifier` and `instance_node`. + + """ if not self.legacy_subsets: return From 4be13d4324cbf7efc9128cb613f4fe3456e1416e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 22:55:09 +0200 Subject: [PATCH 073/409] :recycle: switch print for log --- openpype/hosts/houdini/plugins/create/convert_legacy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py index 2f3d1ef708..4b8041b4f5 100644 --- a/openpype/hosts/houdini/plugins/create/convert_legacy.py +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -69,6 +69,6 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): "creator_identifier": self.family_to_id[family], "instance_node": subset.path() } - print("Converting {} to {}".format( + self.log.info("Converting {} to {}".format( subset.path(), self.family_to_id[family])) imprint(subset, data) From 57b81b4b5b5cd4ab98bfb9d73a8a69ba208bb061 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 14:41:58 +0200 Subject: [PATCH 074/409] hiero: loading effects --- .../hosts/hiero/plugins/load/load_effects.py | 259 ++++++++++++++++++ 1 file changed, 259 insertions(+) create mode 100644 openpype/hosts/hiero/plugins/load/load_effects.py diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py new file mode 100644 index 0000000000..40f8d66d0c --- /dev/null +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -0,0 +1,259 @@ +import json +from collections import OrderedDict +from pprint import pprint +import six + +from openpype.pipeline import ( + AVALON_CONTAINER_ID, + load +) +from openpype.hosts.hiero import api as phiero +from openpype.hosts.hiero.api import tags + + +class LoadEffects(load.LoaderPlugin): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["effectJson"] + families = ["effect"] + + label = "Load Effects" + order = 0 + icon = "cc" + color = "white" + ignore_attr = ["useLifetime"] + + def load(self, context, name, namespace, data): + """ + Loading function to get the soft effects to particular read node + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + active_sequence = phiero.get_current_sequence() + active_track = phiero.get_current_track( + active_sequence, "LoadedEffects") + + # get main variables + version = context['version'] + version_data = version.get("data", {}) + vname = version.get("name", None) + namespace = namespace or context['asset']['name'] + object_name = "{}_{}".format(name, namespace) + + data_imprint = { + "source": version_data["source"], + "version": vname, + "author": version_data["author"], + } + + # getting file path + file = self.fname.replace("\\", "/") + + # getting data from json file with unicode conversion + with open(file, 
"r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).items()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f) + + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + + for ef_name, ef_val in nodes_order.items(): + pprint("_" * 100) + pprint(ef_name) + pprint(ef_val) + new_name = "{}_loaded".format(ef_name) + if new_name not in used_subtracks: + effect_track_item = active_track.createEffect( + effectType=ef_val["class"], + timelineIn=ef_val["timelineIn"], + timelineOut=ef_val["timelineOut"] + ) + effect_track_item.setName(new_name) + node = effect_track_item.node() + for knob_name, knob_value in ef_val["node"].items(): + if ( + not knob_value + or knob_name == "name" + ): + continue + node[knob_name].setValue(knob_value) + + self.containerise( + active_track, + name=name, + namespace=namespace, + object_name=object_name, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + return + + def update(self, container, representation): + """Update the Loader's path + + Nuke automatically tries to reset some variables when changing + the loader's path to a new file. These automatic changes are to its + inputs: + + """ + pass + + def reorder_nodes(self, data): + new_order = OrderedDict() + trackNums = [v["trackIndex"] for k, v in data.items() + if isinstance(v, dict)] + subTrackNums = [v["subTrackIndex"] for k, v in data.items() + if isinstance(v, dict)] + + for trackIndex in range( + min(trackNums), max(trackNums) + 1): + for subTrackIndex in range( + min(subTrackNums), max(subTrackNums) + 1): + item = self.get_item(data, trackIndex, subTrackIndex) + if item is not {}: + new_order.update(item) + return new_order + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if isinstance(val, dict) + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes through all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.items()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, six.text_type): + return str(input) + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + pass + + def containerise( + self, + track, + name, + namespace, + object_name, + context, + loader=None, + data=None + ): + """Bundle Hiero's object into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + track_item (hiero.core.TrackItem): object to imprint as container + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + context (dict): Asset information + loader (str, optional): Name of node used to produce this container. 
+ + Returns: + track_item (hiero.core.TrackItem): containerised object + + """ + + data_imprint = { + object_name: { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": str(name), + "namespace": str(namespace), + "loader": str(loader), + "representation": str(context["representation"]["_id"]), + } + } + + if data: + for k, v in data.items(): + data_imprint[object_name].update({k: v}) + + self.log.debug("_ data_imprint: {}".format(data_imprint)) + self.set_track_openpype_tag(track, data_imprint) + + def set_track_openpype_tag(self, track, data=None): + """ + Set pype track item tag to input track_item. + + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag + """ + data = data or {} + + # basic Tag's attribute + tag_data = { + "editable": "0", + "note": "OpenPype data container", + "icon": "openpype_icon.png", + "metadata": dict(data.items()) + } + # get available pype tag if any + _tag = self.get_track_openpype_tag(track) + + if _tag: + # it not tag then create one + tag = tags.update_tag(_tag, tag_data) + else: + # if pype tag available then update with input data + tag = tags.create_tag(phiero.pype_tag_name, tag_data) + # add it to the input track item + track.addTag(tag) + + return tag + + def get_track_openpype_tag(self, track): + """ + Get pype track item tag created by creator or loader plugin. + + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if tag.name() == phiero.pype_tag_name: + return tag From b04fc48fbc475f671c0876c6d05cfca79c6d95c0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 16:49:38 +0200 Subject: [PATCH 075/409] hiero: fix - skip audio in collect effects --- openpype/hosts/hiero/plugins/publish/collect_clip_effects.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py index 8d2ed9a9c2..9489b1c4fb 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -16,6 +16,9 @@ class CollectClipEffects(pyblish.api.InstancePlugin): review_track_index = instance.context.data.get("reviewTrackIndex") item = instance.data["item"] + if "audio" in instance.data["family"]: + return + # frame range self.handle_start = instance.data["handleStart"] self.handle_end = instance.data["handleEnd"] From 21a3d2067e1732a14c3273a8ba6c2429ac8f7a19 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 28 Oct 2022 16:50:30 +0200 Subject: [PATCH 076/409] hiero: load effects update - adding order - adding clip in out definition --- .../hosts/hiero/plugins/load/load_effects.py | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 40f8d66d0c..3158f29d93 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -41,11 +41,13 @@ class LoadEffects(load.LoaderPlugin): active_sequence, "LoadedEffects") # get main variables - version = context['version'] + version = context["version"] version_data = version.get("data", {}) vname = version.get("name", None) - namespace = namespace or 
context['asset']['name'] + namespace = namespace or context["asset"]["name"] object_name = "{}_{}".format(name, namespace) + clip_in = context["asset"]["data"]["clipIn"] + clip_out = context["asset"]["data"]["clipOut"] data_imprint = { "source": version_data["source"], @@ -69,7 +71,8 @@ class LoadEffects(load.LoaderPlugin): for stitem in phiero.flatten(active_track.subTrackItems()) } - for ef_name, ef_val in nodes_order.items(): + loaded = False + for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()): pprint("_" * 100) pprint(ef_name) pprint(ef_val) @@ -77,8 +80,10 @@ class LoadEffects(load.LoaderPlugin): if new_name not in used_subtracks: effect_track_item = active_track.createEffect( effectType=ef_val["class"], - timelineIn=ef_val["timelineIn"], - timelineOut=ef_val["timelineOut"] + timelineIn=clip_in, + timelineOut=clip_out, + subTrackIndex=index_order + ) effect_track_item.setName(new_name) node = effect_track_item.node() @@ -90,6 +95,12 @@ class LoadEffects(load.LoaderPlugin): continue node[knob_name].setValue(knob_value) + # make sure containerisation will happen + loaded = True + + if not loaded: + return + self.containerise( active_track, name=name, @@ -98,7 +109,6 @@ class LoadEffects(load.LoaderPlugin): context=context, loader=self.__class__.__name__, data=data_imprint) - return def update(self, container, representation): """Update the Loader's path From 49ebb5aa0118a8535250743400789efdf952ba90 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:05:19 +0100 Subject: [PATCH 077/409] hiero: abstraction for effect loader tag operations --- openpype/hosts/hiero/api/lib.py | 62 +++++++++++++++++-- openpype/hosts/hiero/api/pipeline.py | 18 ++++-- .../hosts/hiero/plugins/load/load_effects.py | 52 ---------------- 3 files changed, 70 insertions(+), 62 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index e5d35945af..9e626270f8 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -321,13 +321,67 @@ def get_track_item_pype_tag(track_item): return tag -def set_track_item_pype_tag(track_item, data=None): +def set_track_openpype_tag(track, data=None): """ - Set pype track item tag to input track_item. + Set openpype track tag to input track object. + + Attributes: + track (hiero.core.VideoTrack): hiero object + + Returns: + hiero.core.Tag + """ + data = data or {} + + # basic Tag's attribute + tag_data = { + "editable": "0", + "note": "OpenPype data container", + "icon": "openpype_icon.png", + "metadata": dict(data.items()) + } + # get available pype tag if any + _tag = get_track_openpype_tag(track) + + if _tag: + # it not tag then create one + tag = tags.update_tag(_tag, tag_data) + else: + # if pype tag available then update with input data + tag = tags.create_tag(self.pype_tag_name, tag_data) + # add it to the input track item + track.addTag(tag) + + return tag + + +def get_track_openpype_tag(track): + """ + Get pype track item tag created by creator or loader plugin. Attributes: trackItem (hiero.core.TrackItem): hiero object + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if tag.name() == self.pype_tag_name: + return tag + + +def set_trackitem_openpype_tag(track_item, data=None): + """ + Set openpype track tag to input track object. 
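+    A minimal call might look like this (the payload key and value are
+    illustrative only)::
+
+        set_trackitem_openpype_tag(track_item, {"asset": "shot010"})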
+ + Attributes: + track (hiero.core.VideoTrack): hiero object + Returns: hiero.core.Tag """ @@ -1083,10 +1137,10 @@ def check_inventory_versions(track_items=None): project_name = legacy_io.active_project() filter_result = filter_containers(containers, project_name) for container in filter_result.latest: - set_track_color(container["_track_item"], clip_color) + set_track_color(container["_item"], clip_color) for container in filter_result.outdated: - set_track_color(container["_track_item"], clip_color_last) + set_track_color(container["_item"], clip_color_last) def selection_changed_timeline(event): diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index ea61dc4785..1b78159e04 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -17,6 +17,7 @@ from openpype.pipeline import ( ) from openpype.tools.utils import host_tools from . import lib, menu, events +import hiero log = Logger.get_logger(__name__) @@ -131,11 +132,12 @@ def ls(): yield container -def parse_container(track_item, validate=True): +def parse_container(item, validate=True): """Return container data from track_item's pype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. validate (bool)[optional]: validating with avalon scheme Returns: @@ -143,7 +145,11 @@ def parse_container(track_item, validate=True): """ # convert tag metadata to normal keys names - data = lib.get_track_item_pype_data(track_item) + if type(item) == hiero.core.VideoTrack: + data = lib.set_track_openpype_data(item) + else: + data = lib.set_track_item_pype_data(item) + if ( not data or data.get("id") != "pyblish.avalon.container" @@ -160,15 +166,15 @@ def parse_container(track_item, validate=True): required = ['schema', 'id', 'name', 'namespace', 'loader', 'representation'] - if not all(key in data for key in required): + if any(key not in data for key in required): return container = {key: data[key] for key in required} - container["objectName"] = track_item.name() + container["objectName"] = item.name() # Store reference to the node object - container["_track_item"] = track_item + container["_item"] = item return container diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 3158f29d93..947655b4c8 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -215,55 +215,3 @@ class LoadEffects(load.LoaderPlugin): self.log.debug("_ data_imprint: {}".format(data_imprint)) self.set_track_openpype_tag(track, data_imprint) - - def set_track_openpype_tag(self, track, data=None): - """ - Set pype track item tag to input track_item. 
- - Attributes: - trackItem (hiero.core.TrackItem): hiero object - - Returns: - hiero.core.Tag - """ - data = data or {} - - # basic Tag's attribute - tag_data = { - "editable": "0", - "note": "OpenPype data container", - "icon": "openpype_icon.png", - "metadata": dict(data.items()) - } - # get available pype tag if any - _tag = self.get_track_openpype_tag(track) - - if _tag: - # it not tag then create one - tag = tags.update_tag(_tag, tag_data) - else: - # if pype tag available then update with input data - tag = tags.create_tag(phiero.pype_tag_name, tag_data) - # add it to the input track item - track.addTag(tag) - - return tag - - def get_track_openpype_tag(self, track): - """ - Get pype track item tag created by creator or loader plugin. - - Attributes: - trackItem (hiero.core.TrackItem): hiero object - - Returns: - hiero.core.Tag: hierarchy, orig clip attributes - """ - # get all tags from track item - _tags = track.tags() - if not _tags: - return None - for tag in _tags: - # return only correct tag defined by global name - if tag.name() == phiero.pype_tag_name: - return tag From dcf4688e1c8802510e04ac95f74c0968500a8c52 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:16:47 +0100 Subject: [PATCH 078/409] hiero: renaming functions, with backward compatibility --- openpype/hosts/hiero/api/__init__.py | 12 ++-- openpype/hosts/hiero/api/lib.py | 69 +++++++++++-------- openpype/hosts/hiero/api/pipeline.py | 10 +-- .../plugins/publish/precollect_instances.py | 2 +- 4 files changed, 54 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index 781f846bbe..d0fb24b654 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,9 +30,9 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, - get_track_item_pype_tag, - set_track_item_pype_tag, - get_track_item_pype_data, + get_trackitem_openpype_tag, + set_trackitem_openpype_tag, + get_trackitem_openpype_data, set_publish_attribute, get_publish_attribute, imprint, @@ -85,9 +85,9 @@ __all__ = [ "get_timeline_selection", "get_current_track", "get_track_item_tags", - "get_track_item_pype_tag", - "set_track_item_pype_tag", - "get_track_item_pype_data", + "get_trackitem_openpype_tag", + "set_trackitem_openpype_tag", + "get_trackitem_openpype_data", "set_publish_attribute", "get_publish_attribute", "imprint", diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 9e626270f8..b0da4ce7b3 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -301,26 +301,6 @@ def get_track_item_tags(track_item): return returning_tag_data -def get_track_item_pype_tag(track_item): - """ - Get pype track item tag created by creator or loader plugin. - - Attributes: - trackItem (hiero.core.TrackItem): hiero object - - Returns: - hiero.core.Tag: hierarchy, orig clip attributes - """ - # get all tags from track item - _tags = track_item.tags() - if not _tags: - return None - for tag in _tags: - # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: - return tag - - def set_track_openpype_tag(track, data=None): """ Set openpype track tag to input track object. 
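# A minimal sketch of how loader code is expected to use the track level tag
# helpers above; the sequence, track and payload names are illustrative only.
#
#     track = get_current_track(sequence, "LoadedEffects")
#     set_track_openpype_tag(track, {
#         "effectMain_shot010": {"id": "pyblish.avalon.container"}
#     })
#     tag = get_track_openpype_tag(track)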
@@ -375,6 +355,41 @@ def get_track_openpype_tag(track): return tag +def get_track_item_pype_tag(track_item): + # backward compatibility alias + return get_trackitem_openpype_tag(track_item) + + +def set_track_item_pype_tag(track_item, data=None): + # backward compatibility alias + return set_trackitem_openpype_tag(track_item, data) + + +def get_track_item_pype_data(track_item): + # backward compatibility alias + return get_trackitem_openpype_data(track_item) + + +def get_trackitem_openpype_tag(track_item): + """ + Get pype track item tag created by creator or loader plugin. + + Attributes: + trackItem (hiero.core.TrackItem): hiero object + + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track_item.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if tag.name() == self.pype_tag_name: + return tag + + def set_trackitem_openpype_tag(track_item, data=None): """ Set openpype track tag to input track object. @@ -395,7 +410,7 @@ def set_trackitem_openpype_tag(track_item, data=None): "metadata": dict(data.items()) } # get available pype tag if any - _tag = get_track_item_pype_tag(track_item) + _tag = get_trackitem_openpype_tag(track_item) if _tag: # it not tag then create one @@ -409,7 +424,7 @@ def set_trackitem_openpype_tag(track_item, data=None): return tag -def get_track_item_pype_data(track_item): +def get_trackitem_openpype_data(track_item): """ Get track item's pype tag data. @@ -421,7 +436,7 @@ def get_track_item_pype_data(track_item): """ data = {} # get pype data tag from track item - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) if not tag: return None @@ -474,7 +489,7 @@ def imprint(track_item, data=None): """ data = data or {} - tag = set_track_item_pype_tag(track_item, data) + tag = set_trackitem_openpype_tag(track_item, data) # add publish attribute set_publish_attribute(tag, True) @@ -1084,7 +1099,7 @@ def sync_clip_name_to_data_asset(track_items_list): # get name and data ti_name = track_item.name() - data = get_track_item_pype_data(track_item) + data = get_trackitem_openpype_data(track_item) # ignore if no data on the clip or not publish instance if not data: @@ -1096,10 +1111,10 @@ def sync_clip_name_to_data_asset(track_items_list): if data["asset"] != ti_name: data["asset"] = ti_name # remove the original tag - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) track_item.removeTag(tag) # create new tag with updated data - set_track_item_pype_tag(track_item, data) + set_trackitem_openpype_tag(track_item, data) print("asset was changed in clip: {}".format(ti_name)) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 1b78159e04..0c11f7072f 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -107,7 +107,7 @@ def containerise(track_item, data_imprint.update({k: v}) log.debug("_ data_imprint: {}".format(data_imprint)) - lib.set_track_item_pype_tag(track_item, data_imprint) + lib.set_trackitem_openpype_tag(track_item, data_imprint) return track_item @@ -192,7 +192,7 @@ def update_container(track_item, data=None): """ data = data or dict() - container = lib.get_track_item_pype_data(track_item) + container = lib.get_trackitem_openpype_data(track_item) for _key, _value in container.items(): try: @@ -201,7 +201,7 @@ def update_container(track_item, data=None): pass log.info("Updating container: 
`{}`".format(track_item.name())) - return bool(lib.set_track_item_pype_tag(track_item, container)) + return bool(lib.set_trackitem_openpype_tag(track_item, container)) def launch_workfiles_app(*args): @@ -278,11 +278,11 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): instance, old_value, new_value)) from openpype.hosts.hiero.api import ( - get_track_item_pype_tag, + get_trackitem_openpype_tag, set_publish_attribute ) # Whether instances should be passthrough based on new value track_item = instance.data["item"] - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) set_publish_attribute(tag, new_value) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 1fc4b1f696..bb02919b35 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -48,7 +48,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin): self.log.debug("clip_name: {}".format(clip_name)) # get openpype tag data - tag_data = phiero.get_track_item_pype_data(track_item) + tag_data = phiero.get_trackitem_openpype_data(track_item) self.log.debug("__ tag_data: {}".format(pformat(tag_data))) if not tag_data: From a9ab5baac9903c5a307d737201b12e14ecdbbf85 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:26:48 +0100 Subject: [PATCH 079/409] hiero: improving bckw compatibility after rename --- openpype/hosts/hiero/api/__init__.py | 7 +++ openpype/hosts/hiero/api/lib.py | 75 +++++++++++++++++++++------- 2 files changed, 65 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index d0fb24b654..f457d791f5 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,6 +30,9 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, + get_track_item_pype_tag, + set_track_item_pype_tag, + get_track_item_pype_data, get_trackitem_openpype_tag, set_trackitem_openpype_tag, get_trackitem_openpype_data, @@ -99,6 +102,10 @@ __all__ = [ "apply_colorspace_project", "apply_colorspace_clips", "get_sequence_pattern_and_padding", + # depricated + "get_track_item_pype_tag", + "set_track_item_pype_tag", + "get_track_item_pype_data", # plugins "CreatorWidget", diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index b0da4ce7b3..f4b80aea4e 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -7,11 +7,13 @@ import os import re import sys import platform +import functools +import warnings import ast import shutil import hiero -from Qt import QtWidgets +from Qt import QtWidgets, QtCore, QtXml from openpype.client import get_project from openpype.settings import get_project_settings @@ -20,15 +22,51 @@ from openpype.pipeline.load import filter_containers from openpype.lib import Logger from . import tags -try: - from PySide.QtCore import QFile, QTextStream - from PySide.QtXml import QDomDocument -except ImportError: - from PySide2.QtCore import QFile, QTextStream - from PySide2.QtXml import QDomDocument -# from opentimelineio import opentime -# from pprint import pformat +class DeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. 
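+
+    Example of the alias pattern used by the backward compatible functions
+    in this module::
+
+        @deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag")
+        def get_track_item_pype_tag(track_item):
+            # backward compatibility alias
+            return get_trackitem_openpype_tag(track_item)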
+ """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + log = Logger.get_logger(__name__) @@ -355,16 +393,19 @@ def get_track_openpype_tag(track): return tag +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") def get_track_item_pype_tag(track_item): # backward compatibility alias return get_trackitem_openpype_tag(track_item) +@deprecated("openpype.hosts.hiero.api.lib.set_trackitem_openpype_tag") def set_track_item_pype_tag(track_item, data=None): # backward compatibility alias return set_trackitem_openpype_tag(track_item, data) +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_data") def get_track_item_pype_data(track_item): # backward compatibility alias return get_trackitem_openpype_data(track_item) @@ -901,22 +942,22 @@ def set_selected_track_items(track_items_list, sequence=None): def _read_doc_from_path(path): - # reading QDomDocument from HROX path - hrox_file = QFile(path) - if not hrox_file.open(QFile.ReadOnly): + # reading QtXml.QDomDocument from HROX path + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.ReadOnly): raise RuntimeError("Failed to open file for reading") - doc = QDomDocument() + doc = QtXml.QDomDocument() doc.setContent(hrox_file) hrox_file.close() return doc def _write_doc_to_path(doc, path): - # write QDomDocument to path as HROX - hrox_file = QFile(path) - if not hrox_file.open(QFile.WriteOnly): + # write QtXml.QDomDocument to path as HROX + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.WriteOnly): raise RuntimeError("Failed to open file for writing") - stream = QTextStream(hrox_file) + stream = QtCore.QTextStream(hrox_file) doc.save(stream, 1) hrox_file.close() From 04d1016dfa71d5630e7b920371d3b4ea42e2fcff Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 11:43:34 +0100 Subject: [PATCH 080/409] hiero: update api --- openpype/hosts/hiero/api/__init__.py | 6 ++++ openpype/hosts/hiero/api/lib.py | 51 ++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index f457d791f5..1fa40c9f74 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,6 +30,9 @@ from .lib import ( get_timeline_selection, get_current_track, get_track_item_tags, + get_track_openpype_tag, + set_track_openpype_tag, + get_track_openpype_data, get_track_item_pype_tag, set_track_item_pype_tag, get_track_item_pype_data, @@ -88,6 +91,9 @@ __all__ = [ "get_timeline_selection", "get_current_track", "get_track_item_tags", + "get_track_openpype_tag", + "set_track_openpype_tag", + "get_track_openpype_data", "get_trackitem_openpype_tag", "set_trackitem_openpype_tag", "get_trackitem_openpype_data", diff --git 
a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index f4b80aea4e..3c1d500e46 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -393,6 +393,57 @@ def get_track_openpype_tag(track): return tag +def get_track_openpype_data(track): + """ + Get track's openpype tag data. + + Attributes: + trackItem (hiero.core.VideoTrack): hiero object + + Returns: + dict: data found on pype tag + """ + return_data = {} + # get pype data tag from track item + tag = get_track_openpype_tag(track) + + if not tag: + return None + + # get tag metadata attribute + tag_data = deepcopy(dict(tag.metadata())) + + for obj_name, obj_data in tag_data.items(): + return_data[obj_name] = {} + + # convert tag metadata to normal keys names and values to correct types + for k, v in obj_data.items(): + + key = k.replace("tag.", "") + + try: + # capture exceptions which are related to strings only + if re.match(r"^[\d]+$", v): + value = int(v) + elif re.match(r"^True$", v): + value = True + elif re.match(r"^False$", v): + value = False + elif re.match(r"^None$", v): + value = None + elif re.match(r"^[\w\d_]+$", v): + value = v + else: + value = ast.literal_eval(v) + except (ValueError, SyntaxError) as msg: + log.warning(msg) + value = v + + return_data[obj_name][key] = value + + return return_data + + @deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") def get_track_item_pype_tag(track_item): # backward compatibility alias From 00c2ac36c5c90181db330fba8f10ca6b094c96db Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 11:50:11 +0100 Subject: [PATCH 081/409] Fix enable state of "no registered families" item --- openpype/tools/creator/model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/creator/model.py b/openpype/tools/creator/model.py index d3d60b96f2..307993103b 100644 --- a/openpype/tools/creator/model.py +++ b/openpype/tools/creator/model.py @@ -36,7 +36,7 @@ class CreatorsModel(QtGui.QStandardItemModel): if not items: item = QtGui.QStandardItem("No registered families") item.setEnabled(False) - item.setData(QtCore.Qt.ItemIsEnabled, False) + item.setData(False, QtCore.Qt.ItemIsEnabled) items.append(item) self.invisibleRootItem().appendRows(items) From 66571cc8cded1b6329f839cd9425da2631531a67 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 12:16:08 +0100 Subject: [PATCH 082/409] hiero: update parse_container and ls to new functionality accepting track containers --- openpype/hosts/hiero/api/pipeline.py | 85 +++++++++++++++++----------- 1 file changed, 51 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 0c11f7072f..1ce8e4e1c5 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -124,11 +124,20 @@ def ls(): """ # get all track items from current timeline - all_track_items = lib.get_track_items() + all_items = lib.get_track_items() - for track_item in all_track_items: - container = parse_container(track_item) - if container: + # append all video tracks + for track in lib.get_current_sequence(): + if type(track) != hiero.core.VideoTrack: + continue + all_items.append(track) + + for item in all_items: + container = parse_container(item) + if isinstance(container, list): + for _c in container: + yield _c + elif container: yield container @@ -144,39 +153,47 @@ def parse_container(item, validate=True): dict: The container schema data for input containerized track item. 
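 
     Example of a parsed container (all values below are illustrative)::
 
         {
             "schema": "openpype:container-2.0",
             "id": "pyblish.avalon.container",
             "name": "effectMain",
             "namespace": "shot010",
             "loader": "LoadEffects",
             "representation": "<representation id>",
             "objectName": "effectMain_shot010",
             "_item": item,  # the hiero track or track item object
         }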
""" + def data_to_container(item, data): + if ( + not data + or data.get("id") != "pyblish.avalon.container" + ): + return + + if validate and data and data.get("schema"): + schema.validate(data) + + if not isinstance(data, dict): + return + + # If not all required data return the empty container + required = ['schema', 'id', 'name', + 'namespace', 'loader', 'representation'] + + if any(key not in data for key in required): + return + + container = {key: data[key] for key in required} + + container["objectName"] = item.name() + + # Store reference to the node object + container["_item"] = item + + return container + # convert tag metadata to normal keys names if type(item) == hiero.core.VideoTrack: - data = lib.set_track_openpype_data(item) + return_list = [] + _data = lib.get_track_openpype_data(item) + # convert the data to list and validate them + for _, obj_data in _data.items(): + cotnainer = data_to_container(item, obj_data) + return_list.append(cotnainer) + return return_list else: - data = lib.set_track_item_pype_data(item) - - if ( - not data - or data.get("id") != "pyblish.avalon.container" - ): - return - - if validate and data and data.get("schema"): - schema.validate(data) - - if not isinstance(data, dict): - return - - # If not all required data return the empty container - required = ['schema', 'id', 'name', - 'namespace', 'loader', 'representation'] - - if any(key not in data for key in required): - return - - container = {key: data[key] for key in required} - - container["objectName"] = item.name() - - # Store reference to the node object - container["_item"] = item - - return container + _data = lib.get_track_item_pype_data(item) + return data_to_container(item, _data) def update_container(track_item, data=None): From 25b61d3fdf657f38db35c741456d680cb1c24b59 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 12:17:34 +0100 Subject: [PATCH 083/409] hiero: refactor plugin to new abstracted functionality --- openpype/hosts/hiero/plugins/load/load_effects.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 947655b4c8..fa78684838 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -8,7 +8,6 @@ from openpype.pipeline import ( load ) from openpype.hosts.hiero import api as phiero -from openpype.hosts.hiero.api import tags class LoadEffects(load.LoaderPlugin): @@ -53,6 +52,7 @@ class LoadEffects(load.LoaderPlugin): "source": version_data["source"], "version": vname, "author": version_data["author"], + "children_names": [] } # getting file path @@ -95,6 +95,8 @@ class LoadEffects(load.LoaderPlugin): continue node[knob_name].setValue(knob_value) + # register all loaded children + data_imprint["children_names"].append(new_name) # make sure containerisation will happen loaded = True @@ -187,11 +189,13 @@ class LoadEffects(load.LoaderPlugin): for loaded assets. Arguments: - track_item (hiero.core.TrackItem): object to imprint as container + track (hiero.core.VideoTrack): object to imprint as container name (str): Name of resulting assembly namespace (str): Namespace under which to host container + object_name (str): name of container context (dict): Asset information - loader (str, optional): Name of node used to produce this container. + loader (str, optional): Name of node used to produce this + container. 
Returns: track_item (hiero.core.TrackItem): containerised object @@ -214,4 +218,4 @@ class LoadEffects(load.LoaderPlugin): data_imprint[object_name].update({k: v}) self.log.debug("_ data_imprint: {}".format(data_imprint)) - self.set_track_openpype_tag(track, data_imprint) + phiero.set_track_openpype_tag(track, data_imprint) From b12bb8723040d3c76cff92f6fd1c7b1bef9a5549 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 13:57:03 +0100 Subject: [PATCH 084/409] hiero: refactor update container function --- openpype/hosts/hiero/api/pipeline.py | 41 ++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 1ce8e4e1c5..1e4158261c 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -196,29 +196,46 @@ def parse_container(item, validate=True): return data_to_container(item, _data) -def update_container(track_item, data=None): - """Update container data to input track_item's pype tag. +def update_container(item, data=None): + """Update container data to input track_item or track's + openpype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. data (dict)[optional]: dictionery with data to be updated Returns: bool: True if container was updated correctly """ - data = data or dict() + def update_container_data(container, data): + for key in container: + try: + container[key] = data[key] + except KeyError: + pass + return container - container = lib.get_trackitem_openpype_data(track_item) + data = data or {} - for _key, _value in container.items(): - try: - container[_key] = data[_key] - except KeyError: - pass + if type(item) == hiero.core.VideoTrack: + object_name = "{}_{}".format( + data["name"], data["namespace"]) + containers = lib.get_track_openpype_data(item) + for obj_name, container in containers.items(): + if object_name != obj_name: + continue + updated_container = update_container_data(container, data) + containers.update(updated_container) - log.info("Updating container: `{}`".format(track_item.name())) - return bool(lib.set_trackitem_openpype_tag(track_item, container)) + return bool(lib.set_track_openpype_tag(item, containers)) + else: + container = lib.get_trackitem_openpype_data(item) + updated_container = update_container_data(container, data) + + log.info("Updating container: `{}`".format(item.name())) + return bool(lib.set_trackitem_openpype_tag(item, updated_container)) def launch_workfiles_app(*args): From 4ab8fd1a822d6c2e9f60d3eb4933eee61e381208 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 14:01:44 +0100 Subject: [PATCH 085/409] hiero: updating doc strings --- openpype/hosts/hiero/api/pipeline.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 1e4158261c..e9e16ef5b1 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -220,13 +220,19 @@ def update_container(item, data=None): data = data or {} if type(item) == hiero.core.VideoTrack: + # form object data for test object_name = "{}_{}".format( data["name"], data["namespace"]) + + # get all available containers containers = lib.get_track_openpype_data(item) for obj_name, container in containers.items(): + # ignore all which are not the same object if object_name != obj_name: continue + # update data in 
container updated_container = update_container_data(container, data) + # merge updated container back to containers containers.update(updated_container) return bool(lib.set_track_openpype_tag(item, containers)) From 65e7c45e94ed1fb19dc512ce8ced91506dd2efec Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 17:13:09 +0100 Subject: [PATCH 086/409] hiero: wip updating effect containers --- openpype/hosts/hiero/api/lib.py | 32 ++++--------------- openpype/hosts/hiero/api/pipeline.py | 12 ++++--- openpype/hosts/hiero/api/tags.py | 22 ++++++++----- .../hosts/hiero/plugins/load/load_effects.py | 1 + 4 files changed, 29 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 3c1d500e46..d04a710df1 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -9,6 +9,7 @@ import sys import platform import functools import warnings +import json import ast import shutil import hiero @@ -414,32 +415,11 @@ def get_track_openpype_data(track): tag_data = deepcopy(dict(tag.metadata())) for obj_name, obj_data in tag_data.items(): - return_data[obj_name] = {} - - # convert tag metadata to normal keys names and values to correct types - for k, v in obj_data.items(): - - key = k.replace("tag.", "") - - try: - # capture exceptions which are related to strings only - if re.match(r"^[\d]+$", v): - value = int(v) - elif re.match(r"^True$", v): - value = True - elif re.match(r"^False$", v): - value = False - elif re.match(r"^None$", v): - value = None - elif re.match(r"^[\w\d_]+$", v): - value = v - else: - value = ast.literal_eval(v) - except (ValueError, SyntaxError) as msg: - log.warning(msg) - value = v - - return_data[obj_name][key] = value + obj_name = obj_name.replace("tag.", "") + print(obj_name) + if obj_name in ["applieswhole", "note", "label"]: + continue + return_data[obj_name] = json.loads(obj_data) return return_data diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index e9e16ef5b1..26c8ebe6d3 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -133,12 +133,13 @@ def ls(): all_items.append(track) for item in all_items: - container = parse_container(item) + container_data = parse_container(item) if isinstance(container, list): - for _c in container: + if isinstance(container_data, list): + for _c in container_data: yield _c - elif container: - yield container + elif container_data: + yield container_data def parse_container(item, validate=True): @@ -186,6 +187,9 @@ def parse_container(item, validate=True): if type(item) == hiero.core.VideoTrack: return_list = [] _data = lib.get_track_openpype_data(item) + log.info("_data: {}".format(_data)) + if not _data: + return # convert the data to list and validate them for _, obj_data in _data.items(): cotnainer = data_to_container(item, obj_data) diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index fac26da03a..918af3dc1f 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -1,3 +1,4 @@ +import json import re import os import hiero @@ -85,17 +86,22 @@ def update_tag(tag, data): # get metadata key from data data_mtd = data.get("metadata", {}) - # due to hiero bug we have to make sure keys which are not existent in - # data are cleared of value by `None` - for _mk in mtd.dict().keys(): - if _mk.replace("tag.", "") not in data_mtd.keys(): - mtd.setValue(_mk, str(None)) + # # due to hiero bug we have to make sure keys 
which are not existent in + # # data are cleared of value by `None` + # for _mk in mtd.dict().keys(): + # if _mk.replace("tag.", "") not in data_mtd.keys(): + # mtd.setValue(_mk, str(None)) # set all data metadata to tag metadata - for k, v in data_mtd.items(): + for _k, _v in data_mtd.items(): + value = str(_v) + if type(_v) == dict: + value = json.dumps(_v) + + # set the value mtd.setValue( - "tag.{}".format(str(k)), - str(v) + "tag.{}".format(str(_k)), + value ) # set note description of tag diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index fa78684838..16c9187ad9 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -52,6 +52,7 @@ class LoadEffects(load.LoaderPlugin): "source": version_data["source"], "version": vname, "author": version_data["author"], + "objectName": object_name, "children_names": [] } From 2c4d37d1bfef0b83134bd00775b00815493366cc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:23:22 +0100 Subject: [PATCH 087/409] added create next overlay widget --- openpype/tools/publisher/widgets/__init__.py | 2 + openpype/tools/publisher/widgets/widgets.py | 201 +++++++++++++++++++ 2 files changed, 203 insertions(+) diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index a02c69d5e0..042985b007 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -8,6 +8,7 @@ from .widgets import ( ResetBtn, ValidateBtn, PublishBtn, + CreateNextPageOverlay, ) from .help_widget import ( HelpButton, @@ -28,6 +29,7 @@ __all__ = ( "ResetBtn", "ValidateBtn", "PublishBtn", + "CreateNextPageOverlay", "HelpButton", "HelpDialog", diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d4c2623790..507ecedb0f 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1652,3 +1652,204 @@ class ThumbnailWidget(QtWidgets.QWidget): self.thumbnail_label = thumbnail_label self.default_pix = default_pix self.current_pix = None + + +class CreateNextPageOverlay(QtWidgets.QWidget): + max_value = 100.0 + clicked = QtCore.Signal() + + def __init__(self, parent): + super(CreateNextPageOverlay, self).__init__(parent) + + self._bg_color = QtGui.QColor(127, 127, 255) + self._arrow_color = QtGui.QColor(255, 255, 255) + + change_anim = QtCore.QVariantAnimation() + change_anim.setStartValue(0.0) + change_anim.setEndValue(self.max_value) + change_anim.setDuration(200) + change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) + + change_anim.valueChanged.connect(self._on_anim) + + self._change_anim = change_anim + self._is_visible = None + self._anim_value = 0.0 + self._increasing = False + self._under_mouse = None + self._handle_show_on_own = True + self._mouse_pressed = False + self.set_visible(True) + + def set_increasing(self, increasing): + if self._increasing is increasing: + return + self._increasing = increasing + if increasing: + self._change_anim.setDirection(self._change_anim.Forward) + else: + self._change_anim.setDirection(self._change_anim.Backward) + + if self._change_anim.state() != self._change_anim.Running: + self._change_anim.start() + + def set_visible(self, visible): + if self._is_visible is visible: + return + + self._is_visible = visible + if not visible: + self.set_increasing(False) + if not self._is_anim_finished(): + return + + 
self.setVisible(visible) + self._check_anim_timer() + + def _is_anim_finished(self): + if self._increasing: + return self._anim_value == self.max_value + return self._anim_value == 0.0 + + def _on_anim(self, value): + self._check_anim_timer() + + self._anim_value = value + + self.update() + + if not self._is_anim_finished(): + return + + if not self._is_visible: + self.setVisible(False) + + def set_handle_show_on_own(self, handle): + if self._handle_show_on_own is handle: + return + self._handle_show_on_own = handle + self._under_mouse = None + self._check_anim_timer() + + def set_under_mouse(self, under_mouse): + if self._under_mouse is under_mouse: + return + + if self._handle_show_on_own: + self._handle_show_on_own = False + self._under_mouse = under_mouse + self.set_increasing(under_mouse) + + def _is_under_mouse(self): + mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) + under_mouse = self.rect().contains(mouse_pos) + return under_mouse + + def _check_anim_timer(self): + if not self.isVisible(): + return + + if self._handle_show_on_own: + under_mouse = self._is_under_mouse() + else: + under_mouse = self._under_mouse + + self.set_increasing(under_mouse) + + def enterEvent(self, event): + super(CreateNextPageOverlay, self).enterEvent(event) + if self._handle_show_on_own: + self._check_anim_timer() + + def leaveEvent(self, event): + super(CreateNextPageOverlay, self).leaveEvent(event) + if self._handle_show_on_own: + self._check_anim_timer() + + def mousePressEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self._mouse_pressed = True + super(CreateNextPageOverlay, self).mousePressEvent(event) + + def mouseReleaseEvent(self, event): + if self._mouse_pressed: + self._mouse_pressed = False + if self.rect().contains(event.pos()): + self.clicked.emit() + + super(CreateNextPageOverlay, self).mouseReleaseEvent(event) + + def paintEvent(self, event): + painter = QtGui.QPainter() + painter.begin(self) + if self._anim_value == 0.0: + painter.end() + return + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + ) + + pen = QtGui.QPen() + pen.setWidth(0) + painter.setPen(pen) + rect = QtCore.QRect(self.rect()) + + offset = rect.width() - int( + float(rect.width()) * 0.01 * self._anim_value + ) + + pos_y = rect.center().y() + left = rect.left() + offset + right = rect.right() + top = rect.top() + bottom = rect.bottom() + width = right - left + height = bottom - top + + q_height = height * 0.15 + + arrow_half_height = width * 0.2 + arrow_x_start = left + (width * 0.4) + arrow_x_end = arrow_x_start + arrow_half_height + arrow_top_y_boundry = arrow_half_height + q_height + arrow_bottom_y_boundry = height - (arrow_half_height + q_height) + offset = 0 + if pos_y < arrow_top_y_boundry: + pos_y = arrow_top_y_boundry + elif pos_y > arrow_bottom_y_boundry: + pos_y = arrow_bottom_y_boundry + + top_cubic_y = pos_y - q_height + bottom_cubic_y = pos_y + q_height + + path = QtGui.QPainterPath() + path.moveTo(right, top) + path.lineTo(right, bottom) + + path.cubicTo( + right, bottom, + left, bottom_cubic_y, + left, pos_y + ) + path.cubicTo( + left, top_cubic_y, + right, top, + right, top + ) + path.closeSubpath() + + painter.fillPath(path, self._bg_color) + + src_arrow_path = QtGui.QPainterPath() + src_arrow_path.moveTo(arrow_x_start, pos_y - arrow_half_height) + src_arrow_path.lineTo(arrow_x_end, pos_y) + src_arrow_path.lineTo(arrow_x_start, pos_y + arrow_half_height) + + arrow_stroker = QtGui.QPainterPathStroker() + arrow_stroker.setWidth(min(4, arrow_half_height 
* 0.2)) + arrow_path = arrow_stroker.createStroke(src_arrow_path) + + painter.fillPath(arrow_path, self._arrow_color) + + painter.end() From 30789058b34e0445da3c6a4a1bb12fafb073c3b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:23:38 +0100 Subject: [PATCH 088/409] overview widget can return global geo of subset view widget --- openpype/tools/publisher/widgets/overview_widget.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index be3839b90b..1c924d1631 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -195,6 +195,16 @@ class OverviewWidget(QtWidgets.QFrame): self._subset_views_widget.setMaximumWidth(view_width) self._change_anim.start() + def get_subset_views_geo(self): + parent = self._subset_views_widget.parent() + global_pos = parent.mapToGlobal(self._subset_views_widget.pos()) + return QtCore.QRect( + global_pos.x(), + global_pos.y(), + self._subset_views_widget.width(), + self._subset_views_widget.height() + ) + def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. From 90d0dd718bce3a4537ffe8d2301484369cd67e84 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:24:35 +0100 Subject: [PATCH 089/409] prepared methods for set/check current tab --- openpype/tools/publisher/window.py | 31 +++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index d8a69bbeb0..7a0c34e298 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -403,7 +403,7 @@ class PublisherWindow(QtWidgets.QDialog): self._context_label.setText(label) def _update_publish_details_widget(self, force=False): - if not force and self._tabs_widget.current_tab() != "details": + if not force and not self._is_current_tab("details"): return report_data = self.controller.get_publish_report() @@ -434,7 +434,7 @@ class PublisherWindow(QtWidgets.QDialog): ) def _on_tab_change(self, old_tab, new_tab): - if old_tab == "details": + if old_tab != "details": self._publish_details_widget.close_details_popup() if new_tab in ("create", "publish"): @@ -463,14 +463,23 @@ class PublisherWindow(QtWidgets.QDialog): def _on_create_request(self): self._go_to_create_tab() + def _set_current_tab(self, identifier): + self._tabs_widget.set_current_tab(identifier) + + def _is_current_tab(self, identifier): + return self._tabs_widget.is_current_tab(identifier) + def _go_to_create_tab(self): - self._tabs_widget.set_current_tab("create") + self._set_current_tab("create") + + def _go_to_publish_tab(self): + self._set_current_tab("publish") def _go_to_details_tab(self): - self._tabs_widget.set_current_tab("details") + self._set_current_tab("details") def _go_to_report_tab(self): - self._tabs_widget.set_current_tab("report") + self._set_current_tab("report") def _set_publish_overlay_visibility(self, visible): if visible: @@ -523,10 +532,10 @@ class PublisherWindow(QtWidgets.QDialog): self._set_footer_enabled(False) self._update_publish_details_widget() if ( - not self._tabs_widget.is_current_tab("create") - and not self._tabs_widget.is_current_tab("publish") + not self._is_current_tab("create") + and not self._is_current_tab("publish") ): - self._tabs_widget.set_current_tab("publish") + self._set_current_tab("publish") def 
_on_publish_start(self): self._create_tab.setEnabled(False) @@ -542,8 +551,8 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_details_widget.close_details_popup() - if self._tabs_widget.is_current_tab(self._create_tab): - self._tabs_widget.set_current_tab("publish") + if self._is_current_tab(self._create_tab): + self._set_current_tab("publish") def _on_publish_validated_change(self, event): if event["value"]: @@ -556,7 +565,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed - if self._tabs_widget.is_current_tab("publish"): + if self._is_current_tab("publish"): self._go_to_report_tab() if validate_enabled: From ee94f7c46c707846277a8faf4fb3bcf6087f1edf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:24:56 +0100 Subject: [PATCH 090/409] added overlay widget and necessary parts to window --- openpype/tools/publisher/window.py | 85 +++++++++++++++++++++++++++--- 1 file changed, 78 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 7a0c34e298..ddac19f2e5 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -29,6 +29,8 @@ from .widgets import ( HelpButton, HelpDialog, + + CreateNextPageOverlay, ) @@ -225,8 +227,9 @@ class PublisherWindow(QtWidgets.QDialog): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) - # Timer started on show -> connected to timer counter - # - helps to deffer on show logic by 3 event loops + create_overlay_button = CreateNextPageOverlay(self) + create_overlay_button.set_handle_show_on_own(False) + show_timer = QtCore.QTimer() show_timer.setInterval(1) show_timer.timeout.connect(self._on_show_timer) @@ -255,6 +258,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_btn.clicked.connect(self._on_publish_clicked) publish_frame.details_page_requested.connect(self._go_to_details_tab) + create_overlay_button.clicked.connect(self._go_to_publish_tab) controller.event_system.add_callback( "instances.refresh.finished", self._on_instances_refresh @@ -310,6 +314,7 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_overlay = publish_overlay self._publish_frame = publish_frame + self._content_widget = content_widget self._content_stacked_layout = content_stacked_layout self._overview_widget = overview_widget @@ -342,6 +347,9 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) + self._create_overlay_button = create_overlay_button + self._app_event_listener_installed = False + self._show_timer = show_timer self._show_counter = 0 @@ -355,11 +363,38 @@ class PublisherWindow(QtWidgets.QDialog): self._first_show = False self._on_first_show() + self._show_counter = 0 self._show_timer.start() def resizeEvent(self, event): super(PublisherWindow, self).resizeEvent(event) self._update_publish_frame_rect() + self._update_create_overlay_size() + + def closeEvent(self, event): + self._uninstall_app_event_listener() + self.save_changes() + self._reset_on_show = True + super(PublisherWindow, self).closeEvent(event) + + def eventFilter(self, obj, event): + if event.type() == QtCore.QEvent.MouseMove: + self._update_create_overlay_visibility(event.globalPos()) + return super(PublisherWindow, self).eventFilter(obj, event) + + def _install_app_event_listener(self): + if self._app_event_listener_installed: + return + self._app_event_listener_installed = 
True + app = QtWidgets.QApplication.instance() + app.installEventFilter(self) + + def _uninstall_app_event_listener(self): + if not self._app_event_listener_installed: + return + self._app_event_listener_installed = False + app = QtWidgets.QApplication.instance() + app.removeEventFilter(self) def _on_overlay_message(self, event): self._overlay_object.add_message( @@ -383,16 +418,16 @@ class PublisherWindow(QtWidgets.QDialog): # Reset counter when done for next show event self._show_counter = 0 + self._update_create_overlay_size() + self._update_create_overlay_visibility() + if self._is_current_tab("create"): + self._install_app_event_listener() + # Reset if requested if self._reset_on_show: self._reset_on_show = False self.reset() - def closeEvent(self, event): - self.save_changes() - self._reset_on_show = True - super(PublisherWindow, self).closeEvent(event) - def save_changes(self): self._controller.save_changes() @@ -457,6 +492,13 @@ class PublisherWindow(QtWidgets.QDialog): self._report_widget ) + is_create = new_tab == "create" + if is_create: + self._install_app_event_listener() + else: + self._uninstall_app_event_listener() + self._create_overlay_button.set_visible(is_create) + def _on_context_or_active_change(self): self._validate_create_instances() @@ -669,6 +711,35 @@ class PublisherWindow(QtWidgets.QDialog): event["title"], new_failed_info, "Convertor:" ) + def _update_create_overlay_size(self): + height = self._content_widget.height() + metrics = self._create_overlay_button.fontMetrics() + width = int(metrics.height() * 3) + pos_x = self.width() - width + + tab_pos = self._tabs_widget.parent().mapTo( + self, self._tabs_widget.pos() + ) + tab_height = self._tabs_widget.height() + pos_y = tab_pos.y() + tab_height + + self._create_overlay_button.setGeometry( + pos_x, pos_y, + width, height + ) + + def _update_create_overlay_visibility(self, global_pos=None): + if global_pos is None: + global_pos = QtGui.QCursor.pos() + + under_mouse = False + my_pos = self.mapFromGlobal(global_pos) + if self.rect().contains(my_pos): + widget_geo = self._overview_widget.get_subset_views_geo() + widget_x = widget_geo.left() + (widget_geo.width() * 0.5) + under_mouse = widget_x < global_pos.x() + self._create_overlay_button.set_under_mouse(under_mouse) + class ErrorsMessageBox(ErrorMessageBox): def __init__(self, error_title, failed_info, message_start, parent): From ea6e924dd95b86053092af0f08790b8e8a77be83 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:31:53 +0100 Subject: [PATCH 091/409] use gradient and different color --- openpype/tools/publisher/widgets/widgets.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 507ecedb0f..975a1faa06 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1661,7 +1661,6 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def __init__(self, parent): super(CreateNextPageOverlay, self).__init__(parent) - self._bg_color = QtGui.QColor(127, 127, 255) self._arrow_color = QtGui.QColor(255, 255, 255) change_anim = QtCore.QVariantAnimation() @@ -1839,7 +1838,11 @@ class CreateNextPageOverlay(QtWidgets.QWidget): ) path.closeSubpath() - painter.fillPath(path, self._bg_color) + gradient = QtGui.QLinearGradient(left, pos_y, right, pos_y) + gradient.setColorAt(0, QtGui.QColor(22, 25, 29)) + gradient.setColorAt(1, QtGui.QColor(33, 37, 43)) + + painter.fillPath(path, gradient) 
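+        # The gradient fades the panel from a darker tone at the curved left
+        # edge to a lighter tone at the right edge; the chevron arrow that
+        # follows is stroked on top of this filled background.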
src_arrow_path = QtGui.QPainterPath() src_arrow_path.moveTo(arrow_x_start, pos_y - arrow_half_height) From 42b1012e7c320ec783df7ce5c76b76a24e18896e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 19:41:12 +0100 Subject: [PATCH 092/409] use radial gradient --- openpype/tools/publisher/widgets/widgets.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 975a1faa06..c4481d4d9d 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1838,7 +1838,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): ) path.closeSubpath() - gradient = QtGui.QLinearGradient(left, pos_y, right, pos_y) + radius = height * 0.7 + focal = QtCore.QPointF(left, pos_y) + start_p = QtCore.QPointF(right - (width * 0.5), pos_y) + gradient = QtGui.QRadialGradient(start_p, radius, focal) gradient.setColorAt(0, QtGui.QColor(22, 25, 29)) gradient.setColorAt(1, QtGui.QColor(33, 37, 43)) From 17125a62edec43fe4c144485e584be964398aa41 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 31 Oct 2022 21:03:16 +0100 Subject: [PATCH 093/409] hiero: adding fallback if incompatible knobs from version to version --- openpype/hosts/hiero/api/pipeline.py | 6 +++--- openpype/hosts/hiero/plugins/load/load_effects.py | 7 ++++++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 26c8ebe6d3..c48d404ede 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -134,7 +134,7 @@ def ls(): for item in all_items: container_data = parse_container(item) - if isinstance(container, list): + if isinstance(container_data, list): for _c in container_data: yield _c @@ -187,7 +187,7 @@ def parse_container(item, validate=True): if type(item) == hiero.core.VideoTrack: return_list = [] _data = lib.get_track_openpype_data(item) - log.info("_data: {}".format(_data)) + if not _data: return # convert the data to list and validate them @@ -196,7 +196,7 @@ def parse_container(item, validate=True): return_list.append(cotnainer) return return_list else: - _data = lib.get_track_item_pype_data(item) + _data = lib.get_trackitem_openpype_data(item) return data_to_container(item, _data) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 16c9187ad9..d8a388c6ed 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -94,7 +94,12 @@ class LoadEffects(load.LoaderPlugin): or knob_name == "name" ): continue - node[knob_name].setValue(knob_value) + + try: + node[knob_name].setValue(knob_value) + except NameError: + self.log.warning("Knob: {} cannot be set".format( + knob_name)) # register all loaded children data_imprint["children_names"].append(new_name) From e4e6044198a7240e21387c2931926f7d0cffdbc2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 11:56:27 +0100 Subject: [PATCH 094/409] fix last pixel --- openpype/tools/publisher/widgets/widgets.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index c4481d4d9d..b8fb2d38b9 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1800,9 +1800,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): pos_y 
= rect.center().y() left = rect.left() + offset - right = rect.right() top = rect.top() - bottom = rect.bottom() + # Right and bootm is pixel index + right = rect.right() + 1 + bottom = rect.bottom() + 1 width = right - left height = bottom - top From 049de296240198cdf296d0ff411c2601f1568589 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 11:57:32 +0100 Subject: [PATCH 095/409] handle leave event --- openpype/tools/publisher/window.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index ddac19f2e5..2063cdab96 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -377,6 +377,10 @@ class PublisherWindow(QtWidgets.QDialog): self._reset_on_show = True super(PublisherWindow, self).closeEvent(event) + def leaveEvent(self, event): + super(PublisherWindow, self).leaveEvent(event) + self._update_create_overlay_visibility() + def eventFilter(self, obj, event): if event.type() == QtCore.QEvent.MouseMove: self._update_create_overlay_visibility(event.globalPos()) From 72ce97a6285e1e31782b8c9a3c5e0d6bb49ab56c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:07:06 +0100 Subject: [PATCH 096/409] general: fixing loader for multiselection --- openpype/tools/loader/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/loader/widgets.py b/openpype/tools/loader/widgets.py index d37ce500e0..826c7110da 100644 --- a/openpype/tools/loader/widgets.py +++ b/openpype/tools/loader/widgets.py @@ -515,7 +515,7 @@ class SubsetWidget(QtWidgets.QWidget): if not one_item_selected: # Filter loaders from first subset by intersected combinations for repre, loader in first_loaders: - if (repre["name"], loader) not in found_combinations: + if (repre["name"].lower(), loader) not in found_combinations: continue loaders.append((repre, loader)) From 3dd115feef6c02e0effe7b44874c63056ed8a775 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:38:41 +0100 Subject: [PATCH 097/409] hiero: return specific container name --- openpype/hosts/hiero/api/lib.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index d04a710df1..e340209207 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -394,7 +394,7 @@ def get_track_openpype_tag(track): return tag -def get_track_openpype_data(track): +def get_track_openpype_data(track, container_name=None): """ Get track's openpype tag data. 
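
For context, the hunk above reads back container data that earlier loaders stored on a track tag: every loaded subset sits under a "tag.<objectName>" metadata key as a JSON string, next to Hiero's own bookkeeping keys. The sketch below reproduces just that decoding step with a plain dict standing in for the tag metadata, so it runs without Hiero; the sample values are made up.

import json

# Keys Hiero itself maintains on a tag; they never hold container data.
HIERO_INTERNAL_KEYS = {"applieswhole", "note", "label"}


def decode_track_containers(tag_metadata, container_name=None):
    """Return containers stored on a track tag, optionally a single one."""
    containers = {}
    for key, value in tag_metadata.items():
        name = key.replace("tag.", "")
        if name in HIERO_INTERNAL_KEYS:
            continue
        containers[name] = json.loads(value)

    if container_name is not None:
        return containers[container_name]
    return containers


if __name__ == "__main__":
    metadata = {
        "tag.label": "OpenPypeData",
        "tag.effectMain_sh010": json.dumps({
            "name": "effectMain",
            "namespace": "sh010",
            "children_names": ["Grade_loaded"],
        }),
    }
    print(decode_track_containers(metadata, "effectMain_sh010"))
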
@@ -416,12 +416,16 @@ def get_track_openpype_data(track): for obj_name, obj_data in tag_data.items(): obj_name = obj_name.replace("tag.", "") - print(obj_name) + if obj_name in ["applieswhole", "note", "label"]: continue return_data[obj_name] = json.loads(obj_data) - return return_data + return ( + return_data[container_name] + if container_name + else return_data + ) @deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") From 393692559e3f57b5ed4db333e8c5e2c997801437 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:39:08 +0100 Subject: [PATCH 098/409] hiero: deep copy dicts --- openpype/hosts/hiero/api/pipeline.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index c48d404ede..3475bc62e4 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -1,6 +1,7 @@ """ Basic avalon integration """ +from copy import deepcopy import os import contextlib from collections import OrderedDict @@ -225,19 +226,19 @@ def update_container(item, data=None): if type(item) == hiero.core.VideoTrack: # form object data for test - object_name = "{}_{}".format( - data["name"], data["namespace"]) + object_name = data["objectName"] # get all available containers containers = lib.get_track_openpype_data(item) - for obj_name, container in containers.items(): - # ignore all which are not the same object - if object_name != obj_name: - continue - # update data in container - updated_container = update_container_data(container, data) - # merge updated container back to containers - containers.update(updated_container) + container = lib.get_track_openpype_data(item, object_name) + + containers = deepcopy(containers) + container = deepcopy(container) + + # update data in container + updated_container = update_container_data(container, data) + # merge updated container back to containers + containers.update({object_name: updated_container}) return bool(lib.set_track_openpype_tag(item, containers)) else: From 5b77f92d0bbf9cedb2f6c7b2a81964c45ccabd73 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:39:23 +0100 Subject: [PATCH 099/409] hiero: removing obsolete code --- openpype/hosts/hiero/api/tags.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index 918af3dc1f..cb7bc14edb 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -86,12 +86,6 @@ def update_tag(tag, data): # get metadata key from data data_mtd = data.get("metadata", {}) - # # due to hiero bug we have to make sure keys which are not existent in - # # data are cleared of value by `None` - # for _mk in mtd.dict().keys(): - # if _mk.replace("tag.", "") not in data_mtd.keys(): - # mtd.setValue(_mk, str(None)) - # set all data metadata to tag metadata for _k, _v in data_mtd.items(): value = str(_v) From 8c715a98aaa8bf4343b35f565400197ced021b0a Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:39:59 +0100 Subject: [PATCH 100/409] hiero: update effects finish --- .../hosts/hiero/plugins/load/load_effects.py | 145 ++++++++++++------ 1 file changed, 101 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index d8a388c6ed..3e5225ba22 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ 
b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -1,11 +1,16 @@ import json from collections import OrderedDict -from pprint import pprint import six +from openpype.client import ( + get_version_by_id +) + from openpype.pipeline import ( AVALON_CONTAINER_ID, - load + load, + legacy_io, + get_representation_path ) from openpype.hosts.hiero import api as phiero @@ -40,18 +45,12 @@ class LoadEffects(load.LoaderPlugin): active_sequence, "LoadedEffects") # get main variables - version = context["version"] - version_data = version.get("data", {}) - vname = version.get("name", None) namespace = namespace or context["asset"]["name"] object_name = "{}_{}".format(name, namespace) clip_in = context["asset"]["data"]["clipIn"] clip_out = context["asset"]["data"]["clipOut"] data_imprint = { - "source": version_data["source"], - "version": vname, - "author": version_data["author"], "objectName": object_name, "children_names": [] } @@ -59,6 +58,31 @@ class LoadEffects(load.LoaderPlugin): # getting file path file = self.fname.replace("\\", "/") + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint + ): + self.containerise( + active_track, + name=name, + namespace=namespace, + object_name=object_name, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def _shared_loading( + self, + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=False + ): # getting data from json file with unicode conversion with open(file, "r") as f: json_f = {self.byteify(key): self.byteify(value) @@ -74,9 +98,6 @@ class LoadEffects(load.LoaderPlugin): loaded = False for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()): - pprint("_" * 100) - pprint(ef_name) - pprint(ef_val) new_name = "{}_loaded".format(ef_name) if new_name not in used_subtracks: effect_track_item = active_track.createEffect( @@ -87,46 +108,82 @@ class LoadEffects(load.LoaderPlugin): ) effect_track_item.setName(new_name) - node = effect_track_item.node() - for knob_name, knob_value in ef_val["node"].items(): - if ( - not knob_value - or knob_name == "name" - ): - continue + else: + effect_track_item = used_subtracks[new_name] - try: - node[knob_name].setValue(knob_value) - except NameError: - self.log.warning("Knob: {} cannot be set".format( - knob_name)) + node = effect_track_item.node() + for knob_name, knob_value in ef_val["node"].items(): + if ( + not knob_value + or knob_name == "name" + ): + continue - # register all loaded children - data_imprint["children_names"].append(new_name) - # make sure containerisation will happen - loaded = True + try: + node[knob_name].setValue(knob_value) + except NameError: + self.log.warning("Knob: {} cannot be set".format( + knob_name)) - if not loaded: - return + # register all loaded children + data_imprint["children_names"].append(new_name) - self.containerise( - active_track, - name=name, - namespace=namespace, - object_name=object_name, - context=context, - loader=self.__class__.__name__, - data=data_imprint) + # make sure containerisation will happen + loaded = True + + return loaded def update(self, container, representation): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - + """ Updating previously loaded effects """ - pass + active_track = container["_item"] + file = get_representation_path(representation).replace("\\", "/") + + # get main variables + name = container['name'] + namespace = container['namespace'] + + # get timeline in out data + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + version_data = version_doc["data"] + clip_in = version_data["clipIn"] + clip_out = version_data["clipOut"] + + object_name = "{}_{}".format(name, namespace) + + # Disable previously created nodes + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + container = phiero.get_track_openpype_data( + active_track, object_name + ) + + loaded_subtrack_items = container["children_names"] + for loaded_stitem in loaded_subtrack_items: + if loaded_stitem not in used_subtracks: + continue + item_to_remove = used_subtracks.pop(loaded_stitem) + item_to_remove.node()["enable"].setValue(0) + + data_imprint = { + "objectName": object_name, + "name": name, + "representation": str(representation["_id"]), + "children_names": [] + } + + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=True + ): + return phiero.update_container(active_track, data_imprint) def reorder_nodes(self, data): new_order = OrderedDict() From f3b038ec7df4e77be2a251d3c84722736dc832cc Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 1 Nov 2022 16:45:27 +0100 Subject: [PATCH 101/409] hiero: removing unused attribute --- openpype/hosts/hiero/plugins/load/load_effects.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 3e5225ba22..fab426e58d 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -25,7 +25,6 @@ class LoadEffects(load.LoaderPlugin): order = 0 icon = "cc" color = "white" - ignore_attr = ["useLifetime"] def load(self, context, name, namespace, data): """ From d655a53136e724179da0889d0e508b607d9d173c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 1 Nov 2022 18:45:07 +0100 Subject: [PATCH 102/409] use objected colors from styles --- openpype/tools/publisher/widgets/widgets.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index b8fb2d38b9..444ad4c7dc 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1661,7 +1661,15 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def __init__(self, parent): super(CreateNextPageOverlay, self).__init__(parent) - self._arrow_color = QtGui.QColor(255, 255, 255) + self._arrow_color = ( + get_objected_colors("bg-buttons").get_qcolor() + ) + self._gradient_start_color = ( + get_objected_colors("publisher", "tab-bg").get_qcolor() + ) + self._gradient_end_color = ( + get_objected_colors("bg-inputs").get_qcolor() + ) change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) @@ -1843,8 +1851,8 @@ class CreateNextPageOverlay(QtWidgets.QWidget): focal = QtCore.QPointF(left, pos_y) start_p = QtCore.QPointF(right - (width * 0.5), pos_y) gradient = QtGui.QRadialGradient(start_p, radius, focal) - gradient.setColorAt(0, QtGui.QColor(22, 25, 29)) - gradient.setColorAt(1, QtGui.QColor(33, 37, 43)) + 
gradient.setColorAt(0, self._gradient_start_color) + gradient.setColorAt(1, self._gradient_end_color) painter.fillPath(path, gradient) From 7cf4e085f7c00ff8a9af2fdf538d7b0aed88f566 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:19:02 +0100 Subject: [PATCH 103/409] handle more types --- openpype/lib/transcoding.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index e736ba8ef0..4fc3a7ce94 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -111,6 +111,7 @@ def get_oiio_info_for_input(filepath, logger=None): class RationalToInt: """Rational value stored as division of 2 integers using string.""" + def __init__(self, string_value): parts = string_value.split("/") top = float(parts[0]) @@ -157,16 +158,16 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "int": return int(value) - if value_type == "float": + if value_type in ("float", "double"): return float(value) # Vectors will probably have more types - if value_type in ("vec2f", "float2"): + if value_type in ("vec2f", "float2", "float2d"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 # - are returned as list of lists - if value_type == "matrix": + if value_type in ("matrix", "matrixd"): output = [] current_index = -1 parts = value.split(",") @@ -198,7 +199,7 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "rational2i": return RationalToInt(value) - if value_type == "vector": + if value_type in ("vector", "vectord"): parts = [part.strip() for part in value.split(",")] output = [] for part in parts: From 7aca8136f5ba0ab22fd0e6084d5cc2901ac791a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:19:32 +0100 Subject: [PATCH 104/409] 'get_oiio_info_for_input' can return information about all subimages --- openpype/lib/transcoding.py | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 4fc3a7ce94..9d87818508 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -77,26 +77,38 @@ def get_transcode_temp_directory(): ) -def get_oiio_info_for_input(filepath, logger=None): +def get_oiio_info_for_input(filepath, logger=None, subimages=False): """Call oiiotool to get information about input and return stdout. Stdout should contain xml format string. 
""" args = [ - get_oiio_tools_path(), "--info", "-v", "-i:infoformat=xml", filepath + get_oiio_tools_path(), + "--info", + "-v" ] + if subimages: + args.append("-a") + + args.extend(["-i:infoformat=xml", filepath]) + output = run_subprocess(args, logger=logger) output = output.replace("\r\n", "\n") xml_started = False + subimages = [] lines = [] for line in output.split("\n"): if not xml_started: if not line.startswith("<"): continue xml_started = True + if xml_started: lines.append(line) + if line == "": + subimages.append(lines) + lines = [] if not xml_started: raise ValueError( @@ -105,8 +117,14 @@ def get_oiio_info_for_input(filepath, logger=None): ) ) - xml_text = "\n".join(lines) - return parse_oiio_xml_output(xml_text, logger=logger) + output = [] + for subimage in subimages: + xml_text = "\n".join(subimage) + output.append(parse_oiio_xml_output(xml_text, logger=logger)) + + if subimages: + return output + return output[0] class RationalToInt: From 61d9657ce16e9eb3b4a434368cb316d2bf8ac05a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:20:58 +0100 Subject: [PATCH 105/409] subimages are reason for conversion and skip definition of input channels if there are subimages --- openpype/lib/transcoding.py | 39 +++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 9d87818508..32c71fee7e 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -399,6 +399,10 @@ def should_convert_for_ffmpeg(src_filepath): if not input_info: return None + subimages = input_info.get("subimages") + if subimages is not None and subimages > 1: + return True + # Check compression compression = input_info["attribs"].get("compression") if compression in ("dwaa", "dwab"): @@ -507,13 +511,23 @@ def convert_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) - oiio_cmd.extend([ + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - "-i:ch={}".format(input_channels_str), first_input_path, + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + + oiio_cmd.extend([ + input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + # TODO we should look for all subimages and try (somehow) find the + # best candidate for output + "--subimage", "0" ]) # Add frame definitions to arguments @@ -631,6 +645,15 @@ def convert_input_paths_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: + # Tell oiiotool which channels should be loaded + # - other channels are not loaded to memory so helps to avoid memory + # leak issues + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + for input_path in input_paths: # Prepare subprocess arguments oiio_cmd = [ @@ -644,13 +667,13 @@ def convert_input_paths_for_ffmpeg( oiio_cmd.extend(["--compression", compression]) oiio_cmd.extend([ - # Tell oiiotool which channels should be loaded - # - other channels are not loaded to memory so helps to - # avoid memory leak issues 
- "-i:ch={}".format(input_channels_str), input_path, + input_arg, input_path, # Tell oiiotool which channels should be put to top stack # (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + # TODO we should look for all subimages and try (somehow) find the + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From f437ce7c983cd30a37c3ed697e73d670a79fa87f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:37:26 +0100 Subject: [PATCH 106/409] fix variable names --- openpype/lib/transcoding.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 32c71fee7e..1ab3cb4081 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -96,7 +96,7 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): output = output.replace("\r\n", "\n") xml_started = False - subimages = [] + subimages_lines = [] lines = [] for line in output.split("\n"): if not xml_started: @@ -107,7 +107,7 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): if xml_started: lines.append(line) if line == "": - subimages.append(lines) + subimages_lines.append(lines) lines = [] if not xml_started: @@ -118,8 +118,8 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): ) output = [] - for subimage in subimages: - xml_text = "\n".join(subimage) + for subimage_lines in subimages_lines: + xml_text = "\n".join(subimage_lines) output.append(parse_oiio_xml_output(xml_text, logger=logger)) if subimages: @@ -651,7 +651,7 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - # - this option is crashing if used on multipart/subimages exrs + # - this option is crashing if used on multipart exrs input_arg += ":ch={}".format(input_channels_str) for input_path in input_paths: From 560f327de1cbbff29db576c382f2191844855338 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:38:57 +0100 Subject: [PATCH 107/409] comment out subimage --- openpype/lib/transcoding.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 1ab3cb4081..af40fa752c 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -524,10 +524,10 @@ def convert_for_ffmpeg( input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) "--ch", channels_arg, + # WARNING: This is commented out because ffmpeg won't be able to + # render proper output when only one subimage is outputed with oiio # Use first subimage - # TODO we should look for all subimages and try (somehow) find the - # best candidate for output - "--subimage", "0" + # "--subimage", "0" ]) # Add frame definitions to arguments @@ -671,9 +671,10 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be put to top stack # (and output) "--ch", channels_arg, + # WARNING: This is commented out because ffmpeg won't be able to + # render proper output when only one subimage is outputed with oiio # Use first subimage - # TODO we should look for all subimages and try (somehow) find the - "--subimage", "0" + # "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From ebbf827f0866b05d3d0915a6cb7f86f1bf814fa6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:43:11 
+0100 Subject: [PATCH 108/409] fix line length --- openpype/lib/transcoding.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index af40fa752c..5a57026496 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -672,7 +672,8 @@ def convert_input_paths_for_ffmpeg( # (and output) "--ch", channels_arg, # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed with oiio + # render proper output when only one subimage is outputed + # with oiiotool # Use first subimage # "--subimage", "0" ]) From 372c6d89c37e1d67aea8caab69667d55e5b6f34d Mon Sep 17 00:00:00 2001 From: 2-REC Date: Fri, 4 Nov 2022 15:26:33 +0700 Subject: [PATCH 109/409] Setting from other plugin --- .../publish/validate_texture_workfiles.py | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index 56ea82f6b6..a25b80438d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,5 +1,7 @@ +import os import pyblish.api +from openpype.settings import get_project_settings from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -18,23 +20,40 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True + #TODO(2-rec): remove/change comment # from presets main_workfile_extensions = ['mra'] def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] - if ext not in self.main_workfile_extensions: + main_workfile_extensions = self.get_main_workfile_extensions() + if ext not in main_workfile_extensions: self.log.warning("Only secondary workfile present!") return if not instance.data.get("resources"): msg = "No secondary workfile present for workfile '{}'". \ format(instance.data["name"]) - ext = self.main_workfile_extensions[0] + ext = main_workfile_extensions[0] formatting_data = {"file_name": instance.data["name"], "extension": ext} raise PublishXmlValidationError(self, msg, formatting_data=formatting_data ) + + @classmethod + def get_main_workfile_extensions(cls): + project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) + + #TODO: find better way? 
(depends on other plugin) + try: + extensions = (project_settings["standalonepublisher"] + ["publish"] + ["CollectTextures"] + ["main_workfile_extensions"]) + except KeyError: + extensions = cls.main_workfile_extensions + + return extensions From befd6889ccf35216e1153eec5742d0b16edcceed Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 4 Nov 2022 21:25:12 +0100 Subject: [PATCH 110/409] use much simpler UI for the button --- openpype/tools/publisher/widgets/widgets.py | 112 +++++--------------- openpype/tools/publisher/window.py | 19 ++-- 2 files changed, 35 insertions(+), 96 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 444ad4c7dc..a180107380 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1660,22 +1660,19 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def __init__(self, parent): super(CreateNextPageOverlay, self).__init__(parent) - + self.setCursor(QtCore.Qt.PointingHandCursor) self._arrow_color = ( get_objected_colors("bg-buttons").get_qcolor() ) - self._gradient_start_color = ( + self._bg_color = ( get_objected_colors("publisher", "tab-bg").get_qcolor() ) - self._gradient_end_color = ( - get_objected_colors("bg-inputs").get_qcolor() - ) change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) change_anim.setEndValue(self.max_value) - change_anim.setDuration(200) - change_anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) + change_anim.setDuration(400) + change_anim.setEasingCurve(QtCore.QEasingCurve.OutBounce) change_anim.valueChanged.connect(self._on_anim) @@ -1731,19 +1728,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): if not self._is_visible: self.setVisible(False) - def set_handle_show_on_own(self, handle): - if self._handle_show_on_own is handle: - return - self._handle_show_on_own = handle - self._under_mouse = None - self._check_anim_timer() - def set_under_mouse(self, under_mouse): if self._under_mouse is under_mouse: return - if self._handle_show_on_own: - self._handle_show_on_own = False self._under_mouse = under_mouse self.set_increasing(under_mouse) @@ -1756,22 +1744,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): if not self.isVisible(): return - if self._handle_show_on_own: - under_mouse = self._is_under_mouse() - else: - under_mouse = self._under_mouse - - self.set_increasing(under_mouse) - - def enterEvent(self, event): - super(CreateNextPageOverlay, self).enterEvent(event) - if self._handle_show_on_own: - self._check_anim_timer() - - def leaveEvent(self, event): - super(CreateNextPageOverlay, self).leaveEvent(event) - if self._handle_show_on_own: - self._check_anim_timer() + self.set_increasing(self._under_mouse) def mousePressEvent(self, event): if event.button() == QtCore.Qt.LeftButton: @@ -1792,74 +1765,41 @@ class CreateNextPageOverlay(QtWidgets.QWidget): if self._anim_value == 0.0: painter.end() return + + painter.setClipRect(event.rect()) painter.setRenderHints( painter.Antialiasing | painter.SmoothPixmapTransform ) - pen = QtGui.QPen() - pen.setWidth(0) - painter.setPen(pen) + painter.setPen(QtCore.Qt.NoPen) + rect = QtCore.QRect(self.rect()) + rect_width = rect.width() + rect_height = rect.height() - offset = rect.width() - int( - float(rect.width()) * 0.01 * self._anim_value - ) + size = rect_width * 0.9 - pos_y = rect.center().y() - left = rect.left() + offset - top = rect.top() - # Right and bootm is pixel index - right = rect.right() + 1 - bottom = rect.bottom() + 1 - width = right - 
left - height = bottom - top + x_offset = (rect_width - size) * 0.5 + y_offset = (rect_height - size) * 0.5 + if self._anim_value != self.max_value: + x_offset += rect_width - (rect_width * 0.01 * self._anim_value) - q_height = height * 0.15 - - arrow_half_height = width * 0.2 - arrow_x_start = left + (width * 0.4) + arrow_half_height = size * 0.2 + arrow_x_start = x_offset + (size * 0.4) arrow_x_end = arrow_x_start + arrow_half_height - arrow_top_y_boundry = arrow_half_height + q_height - arrow_bottom_y_boundry = height - (arrow_half_height + q_height) - offset = 0 - if pos_y < arrow_top_y_boundry: - pos_y = arrow_top_y_boundry - elif pos_y > arrow_bottom_y_boundry: - pos_y = arrow_bottom_y_boundry + center_y = rect.center().y() - top_cubic_y = pos_y - q_height - bottom_cubic_y = pos_y + q_height - - path = QtGui.QPainterPath() - path.moveTo(right, top) - path.lineTo(right, bottom) - - path.cubicTo( - right, bottom, - left, bottom_cubic_y, - left, pos_y + painter.setBrush(self._bg_color) + painter.drawEllipse( + x_offset, y_offset, + size, size ) - path.cubicTo( - left, top_cubic_y, - right, top, - right, top - ) - path.closeSubpath() - - radius = height * 0.7 - focal = QtCore.QPointF(left, pos_y) - start_p = QtCore.QPointF(right - (width * 0.5), pos_y) - gradient = QtGui.QRadialGradient(start_p, radius, focal) - gradient.setColorAt(0, self._gradient_start_color) - gradient.setColorAt(1, self._gradient_end_color) - - painter.fillPath(path, gradient) src_arrow_path = QtGui.QPainterPath() - src_arrow_path.moveTo(arrow_x_start, pos_y - arrow_half_height) - src_arrow_path.lineTo(arrow_x_end, pos_y) - src_arrow_path.lineTo(arrow_x_start, pos_y + arrow_half_height) + src_arrow_path.moveTo(arrow_x_start, center_y - arrow_half_height) + src_arrow_path.lineTo(arrow_x_end, center_y) + src_arrow_path.lineTo(arrow_x_start, center_y + arrow_half_height) arrow_stroker = QtGui.QPainterPathStroker() arrow_stroker.setWidth(min(4, arrow_half_height * 0.2)) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 2063cdab96..82a2576ff4 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -228,7 +228,6 @@ class PublisherWindow(QtWidgets.QDialog): publish_frame = PublishFrame(controller, self.footer_border, self) create_overlay_button = CreateNextPageOverlay(self) - create_overlay_button.set_handle_show_on_own(False) show_timer = QtCore.QTimer() show_timer.setInterval(1) @@ -716,20 +715,20 @@ class PublisherWindow(QtWidgets.QDialog): ) def _update_create_overlay_size(self): - height = self._content_widget.height() metrics = self._create_overlay_button.fontMetrics() - width = int(metrics.height() * 3) - pos_x = self.width() - width + size = int(metrics.height() * 3) + end_pos_x = self.width() + start_pos_x = end_pos_x - size - tab_pos = self._tabs_widget.parent().mapTo( - self, self._tabs_widget.pos() + center = self._content_widget.parent().mapTo( + self, + self._content_widget.rect().center() ) - tab_height = self._tabs_widget.height() - pos_y = tab_pos.y() + tab_height + pos_y = center.y() - (size * 0.5) self._create_overlay_button.setGeometry( - pos_x, pos_y, - width, height + start_pos_x, pos_y, + size, size ) def _update_create_overlay_visibility(self, global_pos=None): From 9ec78651547738a2d2ed3cf266ebb9428b44a6b6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 4 Nov 2022 21:32:09 +0100 Subject: [PATCH 111/409] removred unnecessary restart --- openpype/tools/publisher/window.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 7cf3ae0da8..0daa31938d 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -362,7 +362,6 @@ class PublisherWindow(QtWidgets.QDialog): self._first_show = False self._on_first_show() - self._show_counter = 0 self._show_timer.start() def resizeEvent(self, event): From a852973e1139e5f2bba380f5c1e103ab3a817a54 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 4 Nov 2022 21:32:56 +0100 Subject: [PATCH 112/409] fix details dialog close --- openpype/tools/publisher/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 0daa31938d..281c7ad2a1 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -472,7 +472,7 @@ class PublisherWindow(QtWidgets.QDialog): ) def _on_tab_change(self, old_tab, new_tab): - if old_tab != "details": + if old_tab == "details": self._publish_details_widget.close_details_popup() if new_tab in ("create", "publish"): From 017ec79552eeb000edc6159960867dc781275655 Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 7 Nov 2022 23:20:29 +0100 Subject: [PATCH 113/409] change colors --- openpype/tools/publisher/widgets/widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 6c8ee3b332..ece27cd8cc 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1718,10 +1718,10 @@ class CreateNextPageOverlay(QtWidgets.QWidget): super(CreateNextPageOverlay, self).__init__(parent) self.setCursor(QtCore.Qt.PointingHandCursor) self._arrow_color = ( - get_objected_colors("bg-buttons").get_qcolor() + get_objected_colors("font").get_qcolor() ) self._bg_color = ( - get_objected_colors("publisher", "tab-bg").get_qcolor() + get_objected_colors("bg-buttons").get_qcolor() ) change_anim = QtCore.QVariantAnimation() From b75356d631f26048330e65ff24e78107dc0bbd0c Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Mon, 7 Nov 2022 23:20:35 +0100 Subject: [PATCH 114/409] change easing curve --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ece27cd8cc..f170992c1a 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1728,7 +1728,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): change_anim.setStartValue(0.0) change_anim.setEndValue(self.max_value) change_anim.setDuration(400) - change_anim.setEasingCurve(QtCore.QEasingCurve.OutBounce) + change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) change_anim.valueChanged.connect(self._on_anim) From 94114d5ed0ecb4c785403c99e55a94f9b2f3cb6b Mon Sep 17 00:00:00 2001 From: clement hector Date: Tue, 8 Nov 2022 11:21:52 +0100 Subject: [PATCH 115/409] add instance name and extension checks to filter only reviewMain file --- .../kitsu/plugins/publish/integrate_kitsu_review.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index bf80095225..61d5a13660 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ 
b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +import os import gazu import pyblish.api @@ -31,9 +32,13 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): continue review_path = representation.get("published_path") + file_name, file_extension = os.path.splitext(review_path) + + if instance.data.get('name') != 'reviewMain' \ + or file_extension != '.mp4': + continue self.log.debug("Found review at: {}".format(review_path)) - gazu.task.add_preview( task, comment, review_path, normalize_movie=True ) From 3dbfa8ee5143d411adf6bbe2357966078cb819e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 8 Nov 2022 16:30:40 +0100 Subject: [PATCH 116/409] removed max value and use 1.0 --- openpype/tools/publisher/widgets/widgets.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index f170992c1a..7ab6294817 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1711,7 +1711,6 @@ class SubsetAttributesWidget(QtWidgets.QWidget): class CreateNextPageOverlay(QtWidgets.QWidget): - max_value = 100.0 clicked = QtCore.Signal() def __init__(self, parent): @@ -1726,7 +1725,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) - change_anim.setEndValue(self.max_value) + change_anim.setEndValue(1.0) change_anim.setDuration(400) change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) @@ -1768,7 +1767,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): def _is_anim_finished(self): if self._increasing: - return self._anim_value == self.max_value + return self._anim_value == 1.0 return self._anim_value == 0.0 def _on_anim(self, value): @@ -1838,8 +1837,8 @@ class CreateNextPageOverlay(QtWidgets.QWidget): x_offset = (rect_width - size) * 0.5 y_offset = (rect_height - size) * 0.5 - if self._anim_value != self.max_value: - x_offset += rect_width - (rect_width * 0.01 * self._anim_value) + if self._anim_value != 1.0: + x_offset += rect_width - (rect_width * self._anim_value) arrow_half_height = size * 0.2 arrow_x_start = x_offset + (size * 0.4) From cf50722e1fee7c6ab227dedefc74a479713264fb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 8 Nov 2022 21:42:26 +0100 Subject: [PATCH 117/409] flame: load with native colorspace resolved from mapping --- openpype/hosts/flame/api/plugin.py | 13 +++++++++++++ openpype/hosts/flame/plugins/load/load_clip.py | 4 ++-- .../hosts/flame/plugins/load/load_clip_batch.py | 4 ++-- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 092ce9d106..45fa7fd9a4 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -690,6 +690,19 @@ class ClipLoader(LoaderPlugin): ) ] + _mapping = None + + def get_native_colorspace(self, input_colorspace): + if not self._mapping: + settings = get_current_project_settings()["flame"] + mapping = settings["imageio"]["profilesMapping"]["inputs"] + self._mapping = { + input["ocioName"]: input["flameName"] + for input in mapping + } + + return self._mapping.get(input_colorspace) + class OpenClipSolver(flib.MediaInfoFile): create_new_clip = False diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 0843dde76a..23879b923e 100644 --- 
a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -40,10 +40,10 @@ class LoadClip(opfapi.ClipLoader): clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = os.environ["AVALON_WORKDIR"] diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 3b049b861b..2de75df116 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -43,10 +43,10 @@ class LoadClipBatch(opfapi.ClipLoader): clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"] From cc7a3e8581293e7fa2c3a678a43ca9c579c40e0e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 18:41:48 +0800 Subject: [PATCH 118/409] adding the switching on off for multipart and force muiltilayer options --- openpype/hosts/maya/api/lib_rendersettings.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 2b996702c3..2fc7547c8c 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -154,6 +154,16 @@ class RenderSettings(object): self._set_global_output_settings() cmds.setAttr("redshiftOptions.imageFormat", img_ext) + if redshift_render_presets["multilayer_exr"]: + cmds.setAttr("redshiftOptions.exrMultipart", 1) + else: + cmds.setAttr("redshiftOptions.exrMultipart", 0) + + if redshift_render_presets["force_combine"]: + cmds.setAttr("redshiftOptions.exrForceMultilayer", 1) + else: + cmds.setAttr("redshiftOptions.exrForceMultilayer", 0) + cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) self._additional_attribs_setter(additional_options) From 3ee386543bc6b91ee7a0ab3c95424ac7955d7d98 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 9 Nov 2022 11:43:55 +0100 Subject: [PATCH 119/409] hiero: adding animated knobs also making track per subset --- .../hosts/hiero/plugins/load/load_effects.py | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index fab426e58d..0819d1d1b7 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -13,6 +13,7 @@ from openpype.pipeline import ( get_representation_path ) from openpype.hosts.hiero import api as phiero +from openpype.lib import Logger class LoadEffects(load.LoaderPlugin): @@ -26,6 +27,8 @@ class LoadEffects(load.LoaderPlugin): icon = "cc" color = "white" + log = Logger.get_logger(__name__) + def load(self, context, name, namespace, data): """ Loading function to get the soft effects to particular 
read node @@ -41,7 +44,7 @@ class LoadEffects(load.LoaderPlugin): """ active_sequence = phiero.get_current_sequence() active_track = phiero.get_current_track( - active_sequence, "LoadedEffects") + active_sequence, "Loaded_{}".format(name)) # get main variables namespace = namespace or context["asset"]["name"] @@ -119,7 +122,27 @@ class LoadEffects(load.LoaderPlugin): continue try: - node[knob_name].setValue(knob_value) + # assume list means animation + # except 4 values could be RGBA or vector + if isinstance(knob_value, list) and len(knob_value) > 4: + node[knob_name].setAnimated() + for i, value in enumerate(knob_value): + if isinstance(value, list): + # list can have vector animation + for ci, cv in enumerate(value): + node[knob_name].setValueAt( + cv, + (clip_in + i), + ci + ) + else: + # list is single values + node[knob_name].setValueAt( + value, + (clip_in + i) + ) + else: + node[knob_name].setValue(knob_value) except NameError: self.log.warning("Knob: {} cannot be set".format( knob_name)) From 756bb9d85acf7d8d286eb21ca205185d0a18eed1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 9 Nov 2022 16:26:44 +0100 Subject: [PATCH 120/409] hiero: improving management of versions --- openpype/hosts/hiero/api/lib.py | 10 ++++++++-- openpype/hosts/hiero/api/pipeline.py | 21 ++++++++++++--------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index e340209207..2829fe2bf5 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -11,6 +11,7 @@ import functools import warnings import json import ast +import secrets import shutil import hiero @@ -350,6 +351,8 @@ def set_track_openpype_tag(track, data=None): Returns: hiero.core.Tag """ + hash = secrets.token_hex(nbytes=4) + data = data or {} # basic Tag's attribute @@ -367,7 +370,10 @@ def set_track_openpype_tag(track, data=None): tag = tags.update_tag(_tag, tag_data) else: # if pype tag available then update with input data - tag = tags.create_tag(self.pype_tag_name, tag_data) + tag = tags.create_tag( + "{}_{}".format(self.pype_tag_name, hash), + tag_data + ) # add it to the input track item track.addTag(tag) @@ -390,7 +396,7 @@ def get_track_openpype_tag(track): return None for tag in _tags: # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: + if self.pype_tag_name in tag.name(): return tag diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py index 3475bc62e4..4ab73e7d19 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -201,6 +201,15 @@ def parse_container(item, validate=True): return data_to_container(item, _data) +def _update_container_data(container, data): + for key in container: + try: + container[key] = data[key] + except KeyError: + pass + return container + + def update_container(item, data=None): """Update container data to input track_item or track's openpype tag. 
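
The helper added above, together with the update_container() change that follows, implements a simple merge: a video track keeps one dict of containers keyed by objectName, and updating a loaded subset only overwrites the keys that container already tracks. A pure Python sketch of that flow, with invented sample data and the tag write-back left out:

from copy import deepcopy


def update_track_container(containers, object_name, new_data):
    """Return a copy of ``containers`` with one entry refreshed."""
    containers = deepcopy(containers)
    container = containers[object_name]

    # Only keys the container already tracks are refreshed; anything
    # extra in the incoming data is ignored.
    for key in container:
        if key in new_data:
            container[key] = new_data[key]

    containers[object_name] = container
    return containers


if __name__ == "__main__":
    stored = {
        "effectMain_sh010": {
            "name": "effectMain",
            "representation": "old-id",
            "children_names": ["Grade_loaded"],
        }
    }
    new_data = {"representation": "new-id", "children_names": ["Grade_loaded"]}
    print(update_track_container(stored, "effectMain_sh010", new_data))
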
@@ -214,15 +223,9 @@ def update_container(item, data=None): bool: True if container was updated correctly """ - def update_container_data(container, data): - for key in container: - try: - container[key] = data[key] - except KeyError: - pass - return container data = data or {} + data = deepcopy(data) if type(item) == hiero.core.VideoTrack: # form object data for test @@ -236,14 +239,14 @@ def update_container(item, data=None): container = deepcopy(container) # update data in container - updated_container = update_container_data(container, data) + updated_container = _update_container_data(container, data) # merge updated container back to containers containers.update({object_name: updated_container}) return bool(lib.set_track_openpype_tag(item, containers)) else: container = lib.get_trackitem_openpype_data(item) - updated_container = update_container_data(container, data) + updated_container = _update_container_data(container, data) log.info("Updating container: `{}`".format(item.name())) return bool(lib.set_trackitem_openpype_tag(item, updated_container)) From 9996c3f1afbe2e2b3adb110382586ffefd82a3ae Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 13:44:20 +0800 Subject: [PATCH 121/409] AOV Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 3 ++- openpype/hosts/maya/api/lib_rendersettings.py | 10 ---------- .../deadline/plugins/publish/submit_publish_job.py | 10 ++++++---- vendor/configs/OpenColorIO-Configs | 1 + 4 files changed, 9 insertions(+), 15 deletions(-) create mode 160000 vendor/configs/OpenColorIO-Configs diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index cd204445b7..ef75391638 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1016,7 +1016,8 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. 
# AOVs are merged in multi-channel file - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) or \ + bool(self._get_attr("redshiftOptions.exrMultipart")) # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 2fc7547c8c..2b996702c3 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -154,16 +154,6 @@ class RenderSettings(object): self._set_global_output_settings() cmds.setAttr("redshiftOptions.imageFormat", img_ext) - if redshift_render_presets["multilayer_exr"]: - cmds.setAttr("redshiftOptions.exrMultipart", 1) - else: - cmds.setAttr("redshiftOptions.exrMultipart", 0) - - if redshift_render_presets["force_combine"]: - cmds.setAttr("redshiftOptions.exrForceMultilayer", 1) - else: - cmds.setAttr("redshiftOptions.exrForceMultilayer", 0) - cmds.setAttr("defaultResolution.width", width) cmds.setAttr("defaultResolution.height", height) self._additional_attribs_setter(additional_options) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 35f2532c16..615be78794 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -494,12 +494,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - preview = match_aov_pattern(app, aov_patterns, render_file_name) - + self.log.info("aov_pattern:{}".format(aov_patterns)) # toggle preview on if multipart is on - if instance_data.get("multipartExr"): + preview = match_aov_pattern(app, aov_patterns, render_file_name) + #if instance_data.get("multipartExr"): + if "Cryptomatte" in render_file_name: # for redshift preview = True + self.log.info("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name @@ -542,7 +544,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if new_instance.get("extendFrames", False): self._copy_extend_frames(new_instance, rep) instances.append(new_instance) - + self.log.info("instances:{}".format(instances)) return instances def _get_representations(self, instance, exp_files): diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs new file mode 160000 index 0000000000..0bb079c08b --- /dev/null +++ b/vendor/configs/OpenColorIO-Configs @@ -0,0 +1 @@ +Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From 7e2ba84911dec742654ab07f28062c0ccbf0a731 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 13:57:15 +0800 Subject: [PATCH 122/409] AOV Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 3 ++- .../modules/deadline/plugins/publish/submit_publish_job.py | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index ef75391638..f89441cfc7 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1016,7 +1016,8 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like 
Cryptomatte. # AOVs are merged in multi-channel file - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) or \ + multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) \ + or \ bool(self._get_attr("redshiftOptions.exrMultipart")) # Get Redshift Extension from image format diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 615be78794..18fc769d49 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -497,7 +497,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): self.log.info("aov_pattern:{}".format(aov_patterns)) # toggle preview on if multipart is on preview = match_aov_pattern(app, aov_patterns, render_file_name) - #if instance_data.get("multipartExr"): if "Cryptomatte" in render_file_name: # for redshift preview = True From 252859ce0206a011828a1314e1530dbc12db5ea7 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 18:03:46 +0800 Subject: [PATCH 123/409] AOV Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 6 ++++-- .../modules/deadline/plugins/publish/submit_publish_job.py | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index f89441cfc7..a95c1c4932 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -536,6 +536,7 @@ class RenderProductsArnold(ARenderProducts): products = [] aov_name = self._get_attr(aov, "name") + multipart = bool(self._get_attr("defaultArnoldDriver.multipart")) ai_drivers = cmds.listConnections("{}.outputs".format(aov), source=True, destination=False, @@ -589,6 +590,7 @@ class RenderProductsArnold(ARenderProducts): ext=ext, aov=aov_name, driver=ai_driver, + multipart=multipart, camera=camera) products.append(product) @@ -1016,9 +1018,9 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. 
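
The submit_publish_job tweaks in these commits all feed one decision: whether a rendered file becomes a review representation. A simplified stand-in for that logic is sketched below; match_aov_pattern itself is not reproduced here, the regexes are invented examples rather than project defaults, and multipart EXRs always win because the single merged file has to carry the review.

import re


def wants_preview(host_name, file_name, aov_filter, is_multipart):
    """Return True when the rendered file should produce a review."""
    if is_multipart:
        return True
    for pattern in aov_filter.get(host_name, []):
        if re.match(pattern, file_name):
            return True
    return False


if __name__ == "__main__":
    aov_filter = {"maya": [r".*([Bb]eauty).*"]}
    print(wants_preview("maya", "sh010_beauty.1001.exr", aov_filter, False))
    print(wants_preview("maya", "sh010_cryptomatte.1001.exr", aov_filter, False))
    print(wants_preview("maya", "sh010_aov.1001.exr", aov_filter, True))
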
# AOVs are merged in multi-channel file + multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) \ - or \ - bool(self._get_attr("redshiftOptions.exrMultipart")) + or bool(self._get_attr("redshiftOptions.exrMultipart")) # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 18fc769d49..27400bb269 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -494,13 +494,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): else: render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - self.log.info("aov_pattern:{}".format(aov_patterns)) + # toggle preview on if multipart is on preview = match_aov_pattern(app, aov_patterns, render_file_name) - if "Cryptomatte" in render_file_name: # for redshift + + if instance_data.get("multipartExr"): preview = True - self.log.info("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name From 3cd1918f04ef5c13ab10e003064699b1659f8fb0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 11:23:32 +0100 Subject: [PATCH 124/409] shorter animation --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index a33e6e7565..71f476c4ef 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1726,7 +1726,7 @@ class CreateNextPageOverlay(QtWidgets.QWidget): change_anim = QtCore.QVariantAnimation() change_anim.setStartValue(0.0) change_anim.setEndValue(1.0) - change_anim.setDuration(400) + change_anim.setDuration(200) change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) change_anim.valueChanged.connect(self._on_anim) From ddd4e653919adfe58b10caa857948ccf98066868 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 13:39:01 +0100 Subject: [PATCH 125/409] hiero: unification of openpype tags --- openpype/hosts/hiero/api/lib.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index 2829fe2bf5..7f0cf8149a 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -341,6 +341,11 @@ def get_track_item_tags(track_item): return returning_tag_data +def _get_tag_unique_hash(): + # sourcery skip: avoid-builtin-shadow + return secrets.token_hex(nbytes=4) + + def set_track_openpype_tag(track, data=None): """ Set openpype track tag to input track object. 
@@ -351,8 +356,6 @@ def set_track_openpype_tag(track, data=None): Returns: hiero.core.Tag """ - hash = secrets.token_hex(nbytes=4) - data = data or {} # basic Tag's attribute @@ -371,7 +374,10 @@ def set_track_openpype_tag(track, data=None): else: # if pype tag available then update with input data tag = tags.create_tag( - "{}_{}".format(self.pype_tag_name, hash), + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), tag_data ) # add it to the input track item @@ -468,7 +474,7 @@ def get_trackitem_openpype_tag(track_item): return None for tag in _tags: # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: + if self.pype_tag_name in tag.name(): return tag @@ -493,13 +499,18 @@ def set_trackitem_openpype_tag(track_item, data=None): } # get available pype tag if any _tag = get_trackitem_openpype_tag(track_item) - if _tag: # it not tag then create one tag = tags.update_tag(_tag, tag_data) else: # if pype tag available then update with input data - tag = tags.create_tag(self.pype_tag_name, tag_data) + tag = tags.create_tag( + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), + tag_data + ) # add it to the input track item track_item.addTag(tag) From c5d3e8a45788ce03c996096f5af89df967e735a0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 13:39:25 +0100 Subject: [PATCH 126/409] hiero: loading effects not able delete previous nodes --- openpype/hosts/hiero/plugins/load/load_effects.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py index 0819d1d1b7..a3fcd63b5b 100644 --- a/openpype/hosts/hiero/plugins/load/load_effects.py +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -188,7 +188,9 @@ class LoadEffects(load.LoaderPlugin): if loaded_stitem not in used_subtracks: continue item_to_remove = used_subtracks.pop(loaded_stitem) - item_to_remove.node()["enable"].setValue(0) + # TODO: find a way to erase nodes + self.log.debug( + "This node needs to be removed: {}".format(item_to_remove)) data_imprint = { "objectName": object_name, From 79eb997e4b7d49510615606cb6fa1c05ddec67d7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 14:19:49 +0100 Subject: [PATCH 127/409] flame: convert color mapping to classmethod --- openpype/hosts/flame/api/plugin.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 45fa7fd9a4..9efbd5c1bc 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -692,16 +692,17 @@ class ClipLoader(LoaderPlugin): _mapping = None - def get_native_colorspace(self, input_colorspace): - if not self._mapping: + @classmethod + def get_native_colorspace(cls, input_colorspace): + if not cls._mapping: settings = get_current_project_settings()["flame"] mapping = settings["imageio"]["profilesMapping"]["inputs"] - self._mapping = { + cls._mapping = { input["ocioName"]: input["flameName"] for input in mapping } - return self._mapping.get(input_colorspace) + return cls._mapping.get(input_colorspace) class OpenClipSolver(flib.MediaInfoFile): From 0f392dd99455eec17b81a73ac7894d3286d7fa17 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 10 Nov 2022 17:11:42 +0100 Subject: [PATCH 128/409] falame: better colorspace loading --- openpype/hosts/flame/api/plugin.py | 38 ++++++++++++++++++- .../hosts/flame/plugins/load/load_clip.py | 3 +- 
.../flame/plugins/load/load_clip_batch.py | 2 +- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 9efbd5c1bc..26129ebaa6 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -4,13 +4,13 @@ import shutil from copy import deepcopy from xml.etree import ElementTree as ET +import qargparse from Qt import QtCore, QtWidgets -import qargparse from openpype import style -from openpype.settings import get_current_project_settings from openpype.lib import Logger from openpype.pipeline import LegacyCreator, LoaderPlugin +from openpype.settings import get_current_project_settings from . import constants from . import lib as flib @@ -692,8 +692,42 @@ class ClipLoader(LoaderPlugin): _mapping = None + def get_colorspace(self, context): + """Get colorspace name + + Look either to version data or representation data. + + Args: + context (dict): version context data + + Returns: + str: colorspace name or None + """ + version = context['version'] + version_data = version.get("data", {}) + colorspace = version_data.get( + "colorspace", None + ) + + if ( + not colorspace + or colorspace == "Unknown" + ): + colorspace = context["representation"]["data"].get( + "colorspace", None) + + return colorspace + @classmethod def get_native_colorspace(cls, input_colorspace): + """Return native colorspace name. + + Args: + input_colorspace (str | None): colorspace name + + Returns: + str: native colorspace name defined in mapping or None + """ if not cls._mapping: settings = get_current_project_settings()["flame"] mapping = settings["imageio"]["profilesMapping"]["inputs"] diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 23879b923e..f8cb7b3e11 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -36,7 +36,8 @@ class LoadClip(opfapi.ClipLoader): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) + clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 19c0ed1ef0..048ac19431 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -35,7 +35,7 @@ class LoadClipBatch(opfapi.ClipLoader): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): From 9a722cb8bb8acd5deb744acfd11fab3528ae6289 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 10 Nov 2022 17:30:10 +0100 Subject: [PATCH 129/409] :art: creator for online family --- .../plugins/create/create_online.py | 98 +++++++++++++++++++ .../plugins/publish/collect_online_file.py | 24 +++++ 2 files changed, 122 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/create/create_online.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_online_file.py diff --git 
a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py new file mode 100644 index 0000000000..e8092e8eaf --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +"""Creator of online files. + +Online file retain their original name and use it as subset name. To +avoid conflicts, this creator checks if subset with this name already +exists under selected asset. +""" +import copy +import os +import re +from pathlib import Path + +from openpype.client import get_subset_by_name, get_asset_by_name +from openpype.lib.attribute_definitions import FileDef +from openpype.pipeline import ( + CreatedInstance, + CreatorError +) +from openpype.pipeline.create import ( + get_subset_name, + TaskNotSetError, +) + +from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator + + +class OnlineCreator(TrayPublishCreator): + """Creates instance from file and retains its original name.""" + + identifier = "io.openpype.creators.traypublisher.online" + label = "Online" + family = "online" + description = "Publish file retaining its original file name" + extensions = [".mov", ".mp4", ".mfx", ".m4v", ".mpg"] + + def get_detail_description(self): + return """# Publish batch of .mov to multiple assets. + + File names must then contain only asset name, or asset name + version. + (eg. 'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov` + """ + + def get_icon(self): + return "fa.file" + + def create(self, subset_name, instance_data, pre_create_data): + if not pre_create_data.get("representation_file")["filenames"]: + raise CreatorError("No files specified") + + asset = get_asset_by_name(self.project_name, instance_data["asset"]) + origin_basename = Path(pre_create_data.get( + "representation_file")["filenames"][0]).stem + + if get_subset_by_name( + self.project_name, origin_basename, asset["_id"]): + raise CreatorError(f"subset with {origin_basename} already " + "exists in selected asset") + + instance_data["originalBasename"] = origin_basename + subset_name = origin_basename + path = (Path( + pre_create_data.get( + "representation_file")["directory"] + ) / pre_create_data.get( + "representation_file")["filenames"][0]).as_posix() + + instance_data["creator_attributes"] = {"path": path} + + # Create new instance + new_instance = CreatedInstance(self.family, subset_name, + instance_data, self) + self._store_new_instance(new_instance) + + def get_pre_create_attr_defs(self): + return [ + FileDef( + "representation_file", + folders=False, + extensions=self.extensions, + allow_sequences=False, + single_item=True, + label="Representation", + ) + ] + + def get_subset_name( + self, + variant, + task_name, + asset_doc, + project_name, + host_name=None, + instance=None + ): + if instance is None: + return "{originalBasename}" + + return instance.data["subset"] diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py new file mode 100644 index 0000000000..1d173c326b --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from pathlib import Path + + +class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): + """Collect online file and retain its file name.""" + label = "Collect online file" + families = ["online"] + hosts = ["traypublisher"] + + def process(self, instance): + 
file = Path(instance.data["creator_attributes"]["path"]) + + if not instance.data.get("representations"): + instance.data["representations"] = [ + { + "name": file.suffix.lstrip("."), + "ext": file.suffix.lstrip("."), + "files": file.name, + "stagingDir": file.parent.as_posix() + } + ] + From 2b8846766f8cb65f9a6f7528c15ae840849097e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 10 Nov 2022 17:30:57 +0100 Subject: [PATCH 130/409] :art: defaults for online family --- .../defaults/project_anatomy/templates.json | 8 +++++++- .../settings/defaults/project_settings/global.json | 14 +++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_anatomy/templates.json b/openpype/settings/defaults/project_anatomy/templates.json index 3415c4451f..0ac56a4dad 100644 --- a/openpype/settings/defaults/project_anatomy/templates.json +++ b/openpype/settings/defaults/project_anatomy/templates.json @@ -48,10 +48,16 @@ "file": "{originalBasename}_{@version}.{ext}", "path": "{@folder}/{@file}" }, + "online": { + "folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}", + "file": "{originalBasename}<.{@frame}><_{udim}>.{ext}", + "path": "{@folder}/{@file}" + }, "__dynamic_keys_labels__": { "maya2unreal": "Maya to Unreal", "simpleUnrealTextureHero": "Simple Unreal Texture - Hero", - "simpleUnrealTexture": "Simple Unreal Texture" + "simpleUnrealTexture": "Simple Unreal Texture", + "online": "online" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9c3f2f1e1b..0409ce802c 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -483,7 +483,19 @@ ] }, "publish": { - "template_name_profiles": [], + "template_name_profiles": [ + { + "families": [ + "online" + ], + "hosts": [ + "traypublisher" + ], + "task_types": [], + "task_names": [], + "template_name": "online" + } + ], "hero_template_name_profiles": [] } }, From 81451300611b4eb7aab753ad1267848ec1965e72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 11 Nov 2022 10:00:16 +0100 Subject: [PATCH 131/409] :label: fix type hint --- openpype/pipeline/create/creator_plugins.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 782534d589..bb5ce00452 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -393,8 +393,9 @@ class BaseCreator: asset_doc(dict): Asset document for which subset is created. project_name(str): Project name. host_name(str): Which host creates subset. - instance(str|None): Object of 'CreatedInstance' for which is - subset name updated. Passed only on subset name update. + instance(CreatedInstance|None): Object of 'CreatedInstance' for + which is subset name updated. Passed only on subset name + update. 
""" dynamic_data = self.get_dynamic_data( From 2edcb15fbb1640dd57286d83828d9bb05e908c42 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 11 Nov 2022 17:55:26 +0800 Subject: [PATCH 132/409] fixing te multipart boolean option --- openpype/hosts/maya/api/lib_renderproducts.py | 20 ++++++++++++------- .../plugins/publish/submit_publish_job.py | 2 +- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index a95c1c4932..78a0a89472 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -536,7 +536,11 @@ class RenderProductsArnold(ARenderProducts): products = [] aov_name = self._get_attr(aov, "name") - multipart = bool(self._get_attr("defaultArnoldDriver.multipart")) + multipart = False + multilayer = bool(self._get_attr("defaultArnoldDriver.multipart")) + merge_AOVs = bool(self._get_attr("defaultArnoldDriver.mergeAOVs")) + if multilayer or merge_AOVs: + multipart = True ai_drivers = cmds.listConnections("{}.outputs".format(aov), source=True, destination=False, @@ -1018,9 +1022,11 @@ class RenderProductsRedshift(ARenderProducts): # due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file - - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) \ - or bool(self._get_attr("redshiftOptions.exrMultipart")) + multipart = False + force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + exMultipart = bool(self._get_attr("redshiftOptions.exrMultipart")) + if exMultipart or force_layer: + multipart = True # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer @@ -1048,7 +1054,7 @@ class RenderProductsRedshift(ARenderProducts): # Any AOVs that still get processed, like Cryptomatte # by themselves are not multipart files. - aov_multipart = not multipart + # aov_multipart = not multipart # Redshift skips rendering of masterlayer without AOV suffix # when a Beauty AOV is rendered. It overrides the main layer. 
@@ -1079,7 +1085,7 @@ class RenderProductsRedshift(ARenderProducts): productName=aov_light_group_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) @@ -1093,7 +1099,7 @@ class RenderProductsRedshift(ARenderProducts): product = RenderProduct(productName=aov_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 27400bb269..e87cc6beeb 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -500,7 +500,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if instance_data.get("multipartExr"): preview = True - + self.log.info("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name From 81d09b98ffa87983d08ee8fb6e5ef83f23f231d2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 11 Nov 2022 17:58:26 +0800 Subject: [PATCH 133/409] fixing te multipart boolean option --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 78a0a89472..58fcd2d281 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1023,7 +1023,7 @@ class RenderProductsRedshift(ARenderProducts): # like Cryptomatte. # AOVs are merged in multi-channel file multipart = False - force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) # noqa exMultipart = bool(self._get_attr("redshiftOptions.exrMultipart")) if exMultipart or force_layer: multipart = True From 9324bf25383a773d8789a7d6debeea200b179b6f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 11 Nov 2022 18:05:42 +0800 Subject: [PATCH 134/409] fixing te multipart boolean option --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index e87cc6beeb..c1e9dd4015 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -495,8 +495,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - # toggle preview on if multipart is on preview = match_aov_pattern(app, aov_patterns, render_file_name) + # toggle preview on if multipart is on if instance_data.get("multipartExr"): preview = True From a09ab62eb7ab9c06dd99fb1b44d6946a30bf3d12 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:07:51 +0100 Subject: [PATCH 135/409] :recycle: some tweaks --- .../plugins/create/create_online.py | 20 +++++++------------ .../plugins/publish/collect_online_file.py | 6 +++--- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index e8092e8eaf..91016dc794 100644 --- 
a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -5,9 +5,6 @@ Online file retain their original name and use it as subset name. To avoid conflicts, this creator checks if subset with this name already exists under selected asset. """ -import copy -import os -import re from pathlib import Path from openpype.client import get_subset_by_name, get_asset_by_name @@ -16,11 +13,6 @@ from openpype.pipeline import ( CreatedInstance, CreatorError ) -from openpype.pipeline.create import ( - get_subset_name, - TaskNotSetError, -) - from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator @@ -31,14 +23,16 @@ class OnlineCreator(TrayPublishCreator): label = "Online" family = "online" description = "Publish file retaining its original file name" - extensions = [".mov", ".mp4", ".mfx", ".m4v", ".mpg"] + extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg"] def get_detail_description(self): - return """# Publish batch of .mov to multiple assets. + return """# Create file retaining its original file name. - File names must then contain only asset name, or asset name + version. - (eg. 'chair.mov', 'chair_v001.mov', not really safe `my_chair_v001.mov` - """ + This will publish files using template helping to retain original + file name and that file name is used as subset name. + + Bz default it tries to guard against multiple publishes of the same + file.""" def get_icon(self): return "fa.file" diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py index 1d173c326b..459ee463aa 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -3,9 +3,10 @@ import pyblish.api from pathlib import Path -class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): +class CollectOnlineFile(pyblish.api.InstancePlugin): """Collect online file and retain its file name.""" - label = "Collect online file" + label = "Collect Online File" + order = pyblish.api.CollectorOrder families = ["online"] hosts = ["traypublisher"] @@ -21,4 +22,3 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin): "stagingDir": file.parent.as_posix() } ] - From b8b184b1b6c90fefcba386886554ebb32f99798c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:08:02 +0100 Subject: [PATCH 136/409] :art: add validator --- .../plugins/publish/validate_online_file.py | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_online_file.py diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py new file mode 100644 index 0000000000..86b9334184 --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +import pyblish.api + +from openpype.pipeline.publish import ( + ValidateContentsOrder, + PublishValidationError, + OptionalPyblishPluginMixin, +) +from openpype.client import get_subset_by_name, get_asset_by_name + + +class ValidateOnlineFile(OptionalPyblishPluginMixin, + pyblish.api.InstancePlugin): + """Validate that subset doesn't exist yet.""" + label = "Validate Existing Online Files" + hosts = ["traypublisher"] + families = ["online"] + order = ValidateContentsOrder + + optional = True + + def 
process(self, instance): + project_name = instance.context.data["projectName"] + asset_id = instance.data["assetEntity"]["_id"] + subset = get_subset_by_name( + project_name, instance.data["subset"], asset_id) + + if subset: + raise PublishValidationError( + "Subset to be published already exists.", + title=self.label + ) From 9d304f07da447f9a5686be702d6a930c0dc774dd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:08:29 +0100 Subject: [PATCH 137/409] :art: add family to integrator --- openpype/plugins/publish/integrate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0998e643e6..401270a788 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -129,7 +129,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "mvUsd", "mvUsdComposition", "mvUsdOverride", - "simpleUnrealTexture" + "simpleUnrealTexture", + "online" ] default_template_name = "publish" From cae09e0002ba379bbd5b39ce6720e6a2ff07b1ca Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:08:55 +0100 Subject: [PATCH 138/409] :label: fix docstring hints --- openpype/client/entities.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43afccf2f1..bbef8dc65e 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -389,10 +389,11 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): returned if 'None' is passed. Returns: - None: If subset with specified filters was not found. - Dict: Subset document which can be reduced to specified 'fields'. - """ + Union[str, Dict]: None if subset with specified filters was not found. + or dict subset document which can be reduced to + specified 'fields'. + """ if not subset_name: return None From deac4a33d41d9914a41437e21db6ac0af81d797c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:19:40 +0100 Subject: [PATCH 139/409] :rotating_light: fix hound :dog: --- .../traypublisher/plugins/create/create_online.py | 12 ++++++------ .../plugins/publish/validate_online_file.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 91016dc794..22d4b73aee 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -30,7 +30,7 @@ class OnlineCreator(TrayPublishCreator): This will publish files using template helping to retain original file name and that file name is used as subset name. 
- + Bz default it tries to guard against multiple publishes of the same file.""" @@ -52,11 +52,11 @@ class OnlineCreator(TrayPublishCreator): instance_data["originalBasename"] = origin_basename subset_name = origin_basename - path = (Path( - pre_create_data.get( - "representation_file")["directory"] - ) / pre_create_data.get( - "representation_file")["filenames"][0]).as_posix() + path = ( + Path( + pre_create_data.get("representation_file")["directory"] + ) / pre_create_data.get("representation_file")["filenames"][0] + ).as_posix() instance_data["creator_attributes"] = {"path": path} diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py index 86b9334184..12b2e72ced 100644 --- a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py @@ -6,7 +6,7 @@ from openpype.pipeline.publish import ( PublishValidationError, OptionalPyblishPluginMixin, ) -from openpype.client import get_subset_by_name, get_asset_by_name +from openpype.client import get_subset_by_name class ValidateOnlineFile(OptionalPyblishPluginMixin, From dbd00b3751eb6e9ffa378eb0b0c5985afbfdf41e Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 11 Nov 2022 15:22:29 +0100 Subject: [PATCH 140/409] :rotating_light: hound fix 2 --- .../hosts/traypublisher/plugins/create/create_online.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 22d4b73aee..5a6373730d 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -52,11 +52,7 @@ class OnlineCreator(TrayPublishCreator): instance_data["originalBasename"] = origin_basename subset_name = origin_basename - path = ( - Path( - pre_create_data.get("representation_file")["directory"] - ) / pre_create_data.get("representation_file")["filenames"][0] - ).as_posix() + path = (Path(pre_create_data.get("representation_file")["directory"]) / pre_create_data.get("representation_file")["filenames"][0]).as_posix() # noqa instance_data["creator_attributes"] = {"path": path} From cf0cba1dba0d14b60ca1bff0f9d9170aff88bb43 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 11 Nov 2022 15:43:38 +0100 Subject: [PATCH 141/409] fix variable check in collect anatomy instance data --- openpype/plugins/publish/collect_anatomy_instance_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py index f67d3373d9..909b49a07d 100644 --- a/openpype/plugins/publish/collect_anatomy_instance_data.py +++ b/openpype/plugins/publish/collect_anatomy_instance_data.py @@ -188,7 +188,7 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin): for subset_doc in subset_docs: subset_id = subset_doc["_id"] last_version_doc = last_version_docs_by_subset_id.get(subset_id) - if last_version_docs_by_subset_id is None: + if last_version_doc is None: continue asset_id = subset_doc["parent"] From ae8342c57932806f05b7e13a7d82ad7d0b5c4d0b Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 18:40:20 +0800 Subject: [PATCH 142/409] aov Filtering --- vendor/configs/OpenColorIO-Configs | 1 - 1 file changed, 1 deletion(-) delete mode 160000 vendor/configs/OpenColorIO-Configs 
diff --git a/vendor/configs/OpenColorIO-Configs b/vendor/configs/OpenColorIO-Configs deleted file mode 160000 index 0bb079c08b..0000000000 --- a/vendor/configs/OpenColorIO-Configs +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0bb079c08be410030669cbf5f19ff869b88af953 From f6495ca956c709cf33654d12c80cadedb5a272d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Nov 2022 13:30:41 +0100 Subject: [PATCH 143/409] OP-4394 - extension is lowercased in Setting and in uploaded files --- .../webpublisher/plugins/publish/collect_published_files.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 2bf097de41..ac4ade4e48 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -86,6 +86,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): first_file = task_data["files"][0] _, extension = os.path.splitext(first_file) + extension = extension.lower() family, families, tags = self._get_family( self.task_type_to_family, task_type, @@ -244,7 +245,10 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): for config in families_config: if is_sequence != config["is_sequence"]: continue - if (extension in config["extensions"] or + + lower_extensions = [ext.lower() + for ext in config.get("extensions", [])] + if (extension.lower() in lower_extensions or '' in config["extensions"]): # all extensions setting found_family = config["result_family"] break From 93b9dd7224e669c4f453dc0578ebc57ce0812c6f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Nov 2022 13:40:28 +0100 Subject: [PATCH 144/409] OP-4394 - extension is lowercased in Setting and in uploaded files --- .../webpublisher/plugins/publish/collect_published_files.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index ac4ade4e48..40f4da9403 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -247,9 +247,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): continue lower_extensions = [ext.lower() - for ext in config.get("extensions", [])] + for ext in config.get("extensions", [''])] if (extension.lower() in lower_extensions or - '' in config["extensions"]): # all extensions setting + lower_extensions[0] == ''): # all extensions setting found_family = config["result_family"] break From 1e995ea6d921611f221c3352a958cc1d960e8884 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:06:12 +0100 Subject: [PATCH 145/409] remove reviewMain checks --- .../kitsu/plugins/publish/integrate_kitsu_review.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index 61d5a13660..bf77f2c892 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -32,13 +32,8 @@ class IntegrateKitsuReview(pyblish.api.InstancePlugin): continue review_path = representation.get("published_path") - file_name, file_extension = os.path.splitext(review_path) - - 
if instance.data.get('name') != 'reviewMain' \ - or file_extension != '.mp4': - continue - self.log.debug("Found review at: {}".format(review_path)) + gazu.task.add_preview( task, comment, review_path, normalize_movie=True ) From 6934b3e0ef92101871909d4b643c444001a4c478 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:09:18 +0100 Subject: [PATCH 146/409] add an option to chose which families will be uploaded to kitsu --- .../settings/defaults/project_settings/tvpaint.json | 6 ++++++ .../projects_schema/schema_project_tvpaint.json | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 88b5a598cd..2e413f50cd 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -11,6 +11,12 @@ 255, 255, 255 + ], + "families_to_upload": [ + "review", + "renderpass", + "renderlayer", + "renderscene" ] }, "ValidateProjectSettings": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 20fe5b0855..0392c9089b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -56,6 +56,18 @@ "key": "review_bg", "label": "Review BG color", "use_alpha": false + }, + { + "type": "enum", + "key": "families_to_upload", + "label": "Families to upload", + "multiselection": true, + "enum_items": [ + {"review": "review"}, + {"renderpass": "renderPass"}, + {"renderlayer": "renderLayer"}, + {"renderscene": "renderScene"} + ] } ] }, From 4c1d1f961511e6fe9a0a87d84bc16b1b3b710011 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:10:12 +0100 Subject: [PATCH 147/409] add review tag to the selected families in the tvpaint project settings --- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 77712347bd..d8aef1ab6b 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -127,9 +127,9 @@ class ExtractSequence(pyblish.api.Extractor): output_frame_start ) - # Fill tags and new families + # Fill tags and new families from project settings tags = [] - if family_lowered in ("review", "renderlayer", "renderscene"): + if family_lowered in self.families_to_upload: tags.append("review") # Sequence of one frame From fd08bbf17026aa3be3045804503342ce5f9a02c7 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 14 Nov 2022 15:13:11 +0100 Subject: [PATCH 148/409] remove useless import --- openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py index bf77f2c892..e5e6439439 100644 --- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py +++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -import os import gazu import pyblish.api From f18efd29b2aebe89f3cc8dbbbf03dc9bdfdff5b2 Mon Sep 17 00:00:00 2001 From: Petr 
Kalis Date: Tue, 15 Nov 2022 17:36:30 +0100 Subject: [PATCH 149/409] OP-4394 - fix - lowercase extension everywhere Without it it would be stored in DB uppercased and final name would also be uppercased. --- .../webpublisher/plugins/publish/collect_published_files.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 40f4da9403..265e78a6c7 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -181,6 +181,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): def _get_single_repre(self, task_dir, files, tags): _, ext = os.path.splitext(files[0]) + ext = ext.lower() repre_data = { "name": ext[1:], "ext": ext[1:], @@ -200,6 +201,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): frame_start = list(collections[0].indexes)[0] frame_end = list(collections[0].indexes)[-1] ext = collections[0].tail + ext = ext.lower() repre_data = { "frameStart": frame_start, "frameEnd": frame_end, From fbd7531a311d1a0287c45babb12a7b029cd50a7d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 15 Nov 2022 18:42:46 +0100 Subject: [PATCH 150/409] change label of stopped publishing --- openpype/tools/publisher/widgets/validations_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 8c483e8088..935a12bc73 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -511,7 +511,7 @@ class ValidationsWidget(QtWidgets.QFrame): ) # After success publishing publish_started_widget = ValidationArtistMessage( - "Publishing went smoothly", self + "So far so good", self ) # After success publishing publish_stop_ok_widget = ValidationArtistMessage( From 4dd276fc4682f379bc1eaf2b088c24252920eeef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Nov 2022 23:23:26 +0000 Subject: [PATCH 151/409] Bump loader-utils from 1.4.1 to 1.4.2 in /website Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.1 to 1.4.2. - [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.2/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v1.4.1...v1.4.2) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 177a4a3802..220a489dfa 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4812,9 +4812,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From 0645089ad61f0a893ce717a5cf4574ca81cd8ef2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 10:38:08 +0100 Subject: [PATCH 152/409] size of button is fully defined by style --- openpype/style/style.css | 4 ++++ openpype/tools/publisher/window.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index 887c044dae..a7a48cdb9d 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1126,6 +1126,10 @@ ValidationArtistMessage QLabel { background: transparent; } +CreateNextPageOverlay { + font-size: 32pt; +} + /* Settings - NOT USED YET - we need to define font family for settings UI */ diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 281c7ad2a1..febf55b919 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -716,7 +716,7 @@ class PublisherWindow(QtWidgets.QDialog): def _update_create_overlay_size(self): metrics = self._create_overlay_button.fontMetrics() - size = int(metrics.height() * 3) + size = int(metrics.height()) end_pos_x = self.width() start_pos_x = end_pos_x - size From 20dacc342b5b4f5ff407fd616d0dc7818c551844 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 10:40:22 +0100 Subject: [PATCH 153/409] change style of button --- openpype/tools/publisher/widgets/widgets.py | 17 +++++++++-------- openpype/tools/publisher/window.py | 17 ++++++++++++----- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 71f476c4ef..ce3d91ce63 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -1832,23 +1832,24 @@ class CreateNextPageOverlay(QtWidgets.QWidget): rect = QtCore.QRect(self.rect()) rect_width = rect.width() rect_height = rect.height() + radius = rect_width * 0.2 - size = rect_width * 0.9 - - x_offset = (rect_width - size) * 0.5 - y_offset = (rect_height - size) * 0.5 + x_offset = 0 + y_offset = 0 if self._anim_value != 1.0: x_offset += rect_width - (rect_width * self._anim_value) - arrow_half_height = size * 0.2 - arrow_x_start = x_offset + (size * 0.4) + arrow_height = rect_height * 0.4 + arrow_half_height = arrow_height * 0.5 + arrow_x_start = x_offset + ((rect_width - arrow_half_height) * 0.5) arrow_x_end = arrow_x_start + arrow_half_height center_y = rect.center().y() painter.setBrush(self._bg_color) - painter.drawEllipse( + painter.drawRoundedRect( x_offset, y_offset, - size, size + 
rect_width + radius, rect_height, + radius, radius ) src_arrow_path = QtGui.QPainterPath() diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index febf55b919..de26630312 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -257,7 +257,9 @@ class PublisherWindow(QtWidgets.QDialog): publish_btn.clicked.connect(self._on_publish_clicked) publish_frame.details_page_requested.connect(self._go_to_details_tab) - create_overlay_button.clicked.connect(self._go_to_publish_tab) + create_overlay_button.clicked.connect( + self._on_create_overlay_button_click + ) controller.event_system.add_callback( "instances.refresh.finished", self._on_instances_refresh @@ -471,6 +473,10 @@ class PublisherWindow(QtWidgets.QDialog): self._help_dialog.width(), self._help_dialog.height() ) + def _on_create_overlay_button_click(self): + self._create_overlay_button.set_under_mouse(False) + self._go_to_publish_tab() + def _on_tab_change(self, old_tab, new_tab): if old_tab == "details": self._publish_details_widget.close_details_popup() @@ -716,19 +722,20 @@ class PublisherWindow(QtWidgets.QDialog): def _update_create_overlay_size(self): metrics = self._create_overlay_button.fontMetrics() - size = int(metrics.height()) + height = int(metrics.height()) + width = int(height * 0.7) end_pos_x = self.width() - start_pos_x = end_pos_x - size + start_pos_x = end_pos_x - width center = self._content_widget.parent().mapTo( self, self._content_widget.rect().center() ) - pos_y = center.y() - (size * 0.5) + pos_y = center.y() - (height * 0.5) self._create_overlay_button.setGeometry( start_pos_x, pos_y, - size, size + width, height ) def _update_create_overlay_visibility(self, global_pos=None): From 91a4a06ab6e9f9e9a8c6378aeebe014fbe6c9a21 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 11:32:26 +0100 Subject: [PATCH 154/409] change maximum number of frame start/end and clip in/out in anatomy settings --- .../schemas/schema_anatomy_attributes.json | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json index a2a566da0e..3667c9d5d8 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json @@ -16,22 +16,26 @@ { "type": "number", "key": "frameStart", - "label": "Frame Start" + "label": "Frame Start", + "maximum": 999999999 }, { "type": "number", "key": "frameEnd", - "label": "Frame End" + "label": "Frame End", + "maximum": 999999999 }, { "type": "number", "key": "clipIn", - "label": "Clip In" + "label": "Clip In", + "maximum": 999999999 }, { "type": "number", "key": "clipOut", - "label": "Clip Out" + "label": "Clip Out", + "maximum": 999999999 }, { "type": "number", From 33656d00550c64f50d9e42088c389a43315cd905 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 11:32:43 +0100 Subject: [PATCH 155/409] project manager has higher max numbers --- openpype/tools/project_manager/project_manager/view.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/project_manager/project_manager/view.py b/openpype/tools/project_manager/project_manager/view.py index cca892ef72..8d1fe54e83 100644 --- a/openpype/tools/project_manager/project_manager/view.py +++ 
b/openpype/tools/project_manager/project_manager/view.py @@ -28,7 +28,7 @@ class NameDef: class NumberDef: def __init__(self, minimum=None, maximum=None, decimals=None): self.minimum = 0 if minimum is None else minimum - self.maximum = 999999 if maximum is None else maximum + self.maximum = 999999999 if maximum is None else maximum self.decimals = 0 if decimals is None else decimals From 8c6abf1c8faeea4ff5ecafa6c0e5dbfb22ce06cb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 11:32:59 +0100 Subject: [PATCH 156/409] remove duplicated key --- openpype/tools/settings/settings/constants.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py index d98d18c8bf..23526e4de9 100644 --- a/openpype/tools/settings/settings/constants.py +++ b/openpype/tools/settings/settings/constants.py @@ -24,7 +24,6 @@ __all__ = ( "SETTINGS_PATH_KEY", "ROOT_KEY", - "SETTINGS_PATH_KEY", "VALUE_KEY", "SAVE_TIME_KEY", "PROJECT_NAME_KEY", From e11815b663d3910032fc6f2ec492df857ce91590 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Nov 2022 14:41:35 +0100 Subject: [PATCH 157/409] OP-4394 - safer handling of Settings extensions Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../plugins/publish/collect_published_files.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 265e78a6c7..181f8b4ab7 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -247,11 +247,17 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): for config in families_config: if is_sequence != config["is_sequence"]: continue + extensions = config.get("extensions") or [] + lower_extensions = set() + for ext in extensions: + if ext: + ext = ext.lower() + if ext.startswith("."): + ext = ext[1:] + lower_extensions.add(ext) - lower_extensions = [ext.lower() - for ext in config.get("extensions", [''])] - if (extension.lower() in lower_extensions or - lower_extensions[0] == ''): # all extensions setting + # all extensions setting + if not lower_extensions or extension in lower_extensions: found_family = config["result_family"] break From ed7795061f946ca71e7c3b09977c68525e3cd24c Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 12 Nov 2022 03:44:20 +0000 Subject: [PATCH 158/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 81b2925fb5..1953d0d6a5 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.3" +__version__ = "3.14.7-nightly.4" From f9732a8385a75384d91a424ac007285a082c9a2a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 18:52:59 +0100 Subject: [PATCH 159/409] renamed 'CollectAvalonEntities' to 'CollectContextEntities' --- .../hosts/tvpaint/plugins/publish/collect_instance_frames.py | 2 +- openpype/hosts/tvpaint/plugins/publish/validate_marks.py | 2 +- .../plugins/publish/validate_tvpaint_workfile_data.py | 2 +- ...collect_avalon_entities.py => collect_context_entities.py} | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) rename 
openpype/plugins/publish/{collect_avalon_entities.py => collect_context_entities.py} (97%) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py index f291c363b8..d5b79758ad 100644 --- a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py @@ -6,7 +6,7 @@ class CollectOutputFrameRange(pyblish.api.ContextPlugin): When instances are collected context does not contain `frameStart` and `frameEnd` keys yet. They are collected in global plugin - `CollectAvalonEntities`. + `CollectContextEntities`. """ label = "Collect output frame range" order = pyblish.api.CollectorOrder diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py index 12d50e17ff..0030b0fd1c 100644 --- a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -39,7 +39,7 @@ class ValidateMarks(pyblish.api.ContextPlugin): def get_expected_data(context): scene_mark_in = context.data["sceneMarkIn"] - # Data collected in `CollectAvalonEntities` + # Data collected in `CollectContextEntities` frame_end = context.data["frameEnd"] frame_start = context.data["frameStart"] handle_start = context.data["handleStart"] diff --git a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py index a5e4868411..d8b7bb9078 100644 --- a/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py +++ b/openpype/hosts/webpublisher/plugins/publish/validate_tvpaint_workfile_data.py @@ -13,7 +13,7 @@ class ValidateWorkfileData(pyblish.api.ContextPlugin): targets = ["tvpaint_worker"] def process(self, context): - # Data collected in `CollectAvalonEntities` + # Data collected in `CollectContextEntities` frame_start = context.data["frameStart"] frame_end = context.data["frameEnd"] handle_start = context.data["handleStart"] diff --git a/openpype/plugins/publish/collect_avalon_entities.py b/openpype/plugins/publish/collect_context_entities.py similarity index 97% rename from openpype/plugins/publish/collect_avalon_entities.py rename to openpype/plugins/publish/collect_context_entities.py index 3b05b6ae98..0a6072a820 100644 --- a/openpype/plugins/publish/collect_avalon_entities.py +++ b/openpype/plugins/publish/collect_context_entities.py @@ -16,11 +16,11 @@ from openpype.client import get_project, get_asset_by_name from openpype.pipeline import legacy_io, KnownPublishError -class CollectAvalonEntities(pyblish.api.ContextPlugin): +class CollectContextEntities(pyblish.api.ContextPlugin): """Collect Anatomy into Context.""" order = pyblish.api.CollectorOrder - 0.1 - label = "Collect Avalon Entities" + label = "Collect Context Entities" def process(self, context): legacy_io.install() From 910b7d7120be3982548bf8913410cbb71669f0e9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 18:53:34 +0100 Subject: [PATCH 160/409] get "asset" and "task" from context --- openpype/plugins/publish/collect_context_entities.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/collect_context_entities.py b/openpype/plugins/publish/collect_context_entities.py index 0a6072a820..31fbeb5dbd 100644 --- a/openpype/plugins/publish/collect_context_entities.py +++ 
b/openpype/plugins/publish/collect_context_entities.py @@ -3,6 +3,8 @@ Requires: session -> AVALON_ASSET context -> projectName + context -> asset + context -> task Provides: context -> projectEntity - Project document from database. @@ -13,20 +15,19 @@ Provides: import pyblish.api from openpype.client import get_project, get_asset_by_name -from openpype.pipeline import legacy_io, KnownPublishError +from openpype.pipeline import KnownPublishError class CollectContextEntities(pyblish.api.ContextPlugin): - """Collect Anatomy into Context.""" + """Collect entities into Context.""" order = pyblish.api.CollectorOrder - 0.1 label = "Collect Context Entities" def process(self, context): - legacy_io.install() project_name = context.data["projectName"] - asset_name = legacy_io.Session["AVALON_ASSET"] - task_name = legacy_io.Session["AVALON_TASK"] + asset_name = context.data["asset"] + task_name = context.data["task"] project_entity = get_project(project_name) if not project_entity: From 017720d754d5912d4df5a233168c64f3caccd56f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 10 Nov 2022 19:00:27 +0100 Subject: [PATCH 161/409] get "task" from context in anatomy context data --- openpype/plugins/publish/collect_anatomy_context_data.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_anatomy_context_data.py b/openpype/plugins/publish/collect_anatomy_context_data.py index 8433816908..55ce8e06f4 100644 --- a/openpype/plugins/publish/collect_anatomy_context_data.py +++ b/openpype/plugins/publish/collect_anatomy_context_data.py @@ -15,7 +15,6 @@ Provides: import json import pyblish.api -from openpype.pipeline import legacy_io from openpype.pipeline.template_data import get_template_data @@ -53,7 +52,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin): asset_entity = context.data.get("assetEntity") task_name = None if asset_entity: - task_name = legacy_io.Session["AVALON_TASK"] + task_name = context.data["task"] anatomy_data = get_template_data( project_entity, asset_entity, task_name, host_name, system_settings From db6bfcb1ee978a3aa77ed00b1dbc2462715a187b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Jul 2022 03:59:40 +0000 Subject: [PATCH 162/409] Bump terser from 5.10.0 to 5.14.2 in /website Bumps [terser](https://github.com/terser/terser) from 5.10.0 to 5.14.2. - [Release notes](https://github.com/terser/terser/releases) - [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md) - [Commits](https://github.com/terser/terser/commits) --- updated-dependencies: - dependency-name: terser dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- website/yarn.lock | 64 +++++++++++++++++++++++++++++++++-------------- 1 file changed, 45 insertions(+), 19 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 7af15e9145..177a4a3802 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -1543,15 +1543,37 @@ dependencies: "@hapi/hoek" "^9.0.0" +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/resolve-uri@^3.0.3": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.5.tgz#68eb521368db76d040a6315cdb24bf2483037b9c" - integrity sha512-VPeQ7+wH0itvQxnG+lIzWgkysKIr3L9sslimFW55rHMdGu/qCQ5z5h9zq4gI8uBtqkpHhsF4Z/OwExufUCThew== + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.11" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.11.tgz#771a1d8d744eeb71b6adb35808e1a6c7b9b8c8ec" - integrity sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg== + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== "@jridgewell/trace-mapping@^0.3.0": version "0.3.4" @@ -1561,6 +1583,14 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@mdx-js/mdx@1.6.22", "@mdx-js/mdx@^1.6.21": version "1.6.22" resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-1.6.22.tgz#8a723157bf90e78f17dc0f27995398e6c731f1ba" @@ -2140,10 +2170,10 @@ acorn@^6.1.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== -acorn@^8.0.4, 
acorn@^8.4.1: - version "8.7.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" - integrity sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ== +acorn@^8.0.4, acorn@^8.4.1, acorn@^8.5.0: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== address@^1.0.1, address@^1.1.2: version "1.1.2" @@ -6843,11 +6873,6 @@ source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -source-map@~0.7.2: - version "0.7.3" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" - integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== - sourcemap-codec@^1.4.4: version "1.4.8" resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" @@ -7053,12 +7078,13 @@ terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.4: terser "^5.7.2" terser@^5.10.0, terser@^5.7.2: - version "5.10.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.10.0.tgz#b86390809c0389105eb0a0b62397563096ddafcc" - integrity sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA== + version "5.14.2" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.14.2.tgz#9ac9f22b06994d736174f4091aa368db896f1c10" + integrity sha512-oL0rGeM/WFQCUd0y2QrWxYnq7tfSuKBiqTjRPWrRgB46WD/kiwHwF8T23z78H6Q6kGCuuHcPB+KULHRdxvVGQA== dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" commander "^2.20.0" - source-map "~0.7.2" source-map-support "~0.5.20" text-table@^0.2.0: From ff760342c7719238e9ae06f9bb23c8747cabb615 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 14 Nov 2022 18:27:11 +0100 Subject: [PATCH 163/409] ignore case sensitivity of extension in files widget --- openpype/lib/attribute_definitions.py | 7 +++++++ openpype/tools/attribute_defs/files_widget.py | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 589a4ef9ab..6baeaec045 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -541,6 +541,13 @@ class FileDefItem(object): return ext return None + @property + def lower_ext(self): + ext = self.ext + if ext is not None: + return ext.lower() + return ext + @property def is_dir(self): if self.is_empty: diff --git a/openpype/tools/attribute_defs/files_widget.py b/openpype/tools/attribute_defs/files_widget.py index 3f1e6a34e1..738e50ba07 100644 --- a/openpype/tools/attribute_defs/files_widget.py +++ b/openpype/tools/attribute_defs/files_widget.py @@ -349,7 +349,7 @@ class FilesModel(QtGui.QStandardItemModel): item.setData(file_item.filenames, FILENAMES_ROLE) item.setData(file_item.directory, DIRPATH_ROLE) item.setData(icon_pixmap, ITEM_ICON_ROLE) - item.setData(file_item.ext, EXT_ROLE) + item.setData(file_item.lower_ext, EXT_ROLE) item.setData(file_item.is_dir, IS_DIR_ROLE) item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE) @@ -463,7 +463,7 @@ class 
FilesProxyModel(QtCore.QSortFilterProxyModel): for filepath in filepaths: if os.path.isfile(filepath): _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: + if ext.lower() in self._allowed_extensions: return True elif self._allow_folders: @@ -475,7 +475,7 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel): for filepath in filepaths: if os.path.isfile(filepath): _, ext = os.path.splitext(filepath) - if ext in self._allowed_extensions: + if ext.lower() in self._allowed_extensions: filtered_paths.append(filepath) elif self._allow_folders: From 213c78b9a019ac3a8956718e19564b9d5bdfa067 Mon Sep 17 00:00:00 2001 From: Roy Nieterau Date: Wed, 14 Sep 2022 00:16:06 +0200 Subject: [PATCH 164/409] Avoid name conflict where `group_name != group_node` due to maya auto renaming new node --- openpype/hosts/maya/plugins/load/load_yeti_cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_yeti_cache.py b/openpype/hosts/maya/plugins/load/load_yeti_cache.py index 090047e22d..5ba381050a 100644 --- a/openpype/hosts/maya/plugins/load/load_yeti_cache.py +++ b/openpype/hosts/maya/plugins/load/load_yeti_cache.py @@ -73,8 +73,8 @@ class YetiCacheLoader(load.LoaderPlugin): c = colors.get(family) if c is not None: - cmds.setAttr(group_name + ".useOutlinerColor", 1) - cmds.setAttr(group_name + ".outlinerColor", + cmds.setAttr(group_node + ".useOutlinerColor", 1) + cmds.setAttr(group_node + ".outlinerColor", (float(c[0])/255), (float(c[1])/255), (float(c[2])/255) From d662b34ca7a70ddb797b4aef4d570028c23a5031 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 15 Nov 2022 13:26:22 +0100 Subject: [PATCH 165/409] added settings for validate frame range in tray publisher --- .../project_settings/traypublisher.json | 7 +++++ .../schema_project_traypublisher.json | 18 +++++++++++++ .../schemas/template_validate_plugin.json | 26 +++++++++++++++++++ 3 files changed, 51 insertions(+) create mode 100644 openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json diff --git a/openpype/settings/defaults/project_settings/traypublisher.json b/openpype/settings/defaults/project_settings/traypublisher.json index 5db2a79772..e99b96b8c4 100644 --- a/openpype/settings/defaults/project_settings/traypublisher.json +++ b/openpype/settings/defaults/project_settings/traypublisher.json @@ -303,5 +303,12 @@ "extensions": [ ".mov" ] + }, + "publish": { + "ValidateFrameRange": { + "enabled": true, + "optional": true, + "active": true + } } } \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json index 7c61aeed50..faa5033d2a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_traypublisher.json @@ -311,6 +311,24 @@ "object_type": "text" } ] + }, + { + "type": "dict", + "collapsible": true, + "key": "publish", + "label": "Publish plugins", + "children": [ + { + "type": "schema_template", + "name": "template_validate_plugin", + "template_data": [ + { + "key": "ValidateFrameRange", + "label": "Validate frame range" + } + ] + } + ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json new file mode 100644 index 
0000000000..b57cad6719 --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_validate_plugin.json @@ -0,0 +1,26 @@ +[ + { + "type": "dict", + "collapsible": true, + "key": "{key}", + "label": "{label}", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "optional", + "label": "Optional" + }, + { + "type": "boolean", + "key": "active", + "label": "Active" + } + ] + } +] From 125d0bbeed7b07640bc34dd877dac2e4c814895f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Mon, 31 Oct 2022 13:28:50 +0100 Subject: [PATCH 166/409] Feature: Auto download last published workfile as first workfile --- .../hooks/pre_copy_last_published_workfile.py | 124 ++++++++++++++++++ openpype/modules/sync_server/sync_server.py | 9 +- 2 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 openpype/hooks/pre_copy_last_published_workfile.py diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py new file mode 100644 index 0000000000..004f9d25e7 --- /dev/null +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -0,0 +1,124 @@ +import gc +import os +import shutil +from openpype.client.entities import ( + get_last_version_by_subset_id, + get_representations, + get_subsets, +) +from openpype.lib import PreLaunchHook +from openpype.modules.base import ModulesManager +from openpype.pipeline.load.utils import get_representation_path + + +class CopyLastPublishedWorkfile(PreLaunchHook): + """Copy last published workfile as first workfile. + + Prelaunch hook works only if last workfile leads to not existing file. + - That is possible only if it's first version. + """ + + # Before `AddLastWorkfileToLaunchArgs` + order = -1 + app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"] + + def execute(self): + """Check if local workfile doesn't exist, else copy it. + + 1- Check if setting for this feature is enabled + 2- Check if workfile in work area doesn't exist + 3- Check if published workfile exists and is copied locally in publish + + Returns: + None: This is a void method. + """ + # TODO setting + self.log.info("Trying to fetch last published workfile...") + + last_workfile = self.data.get("last_workfile_path") + if os.path.exists(last_workfile): + self.log.debug( + "Last workfile exists. 
Skipping {} process.".format( + self.__class__.__name__ + ) + ) + return + + project_name = self.data["project_name"] + task_name = self.data["task_name"] + + project_doc = self.data.get("project_doc") + asset_doc = self.data.get("asset_doc") + anatomy = self.data.get("anatomy") + if project_doc and asset_doc: + # Get subset id + subset_id = next( + ( + subset["_id"] + for subset in get_subsets( + project_name, + asset_ids=[asset_doc["_id"]], + fields=["_id", "data.family"], + ) + if subset["data"]["family"] == "workfile" + ), + None, + ) + if not subset_id: + return + + # Get workfile representation + workfile_representation = next( + ( + representation + for representation in get_representations( + project_name, + version_ids=[ + get_last_version_by_subset_id( + project_name, subset_id, fields=["_id"] + )["_id"] + ], + ) + if representation["context"]["task"]["name"] == task_name + ), + None, + ) + + if workfile_representation: # TODO add setting + # Get sync server from Tray, which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) + + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() + + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() + + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) + + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 8b11055e65..def9e6cfd8 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -236,7 +236,11 @@ class SyncServerThread(threading.Thread): """ def __init__(self, module): self.log = Logger.get_logger(self.__class__.__name__) - super(SyncServerThread, self).__init__() + + # Event to trigger files have been processed + self.files_processed = threading.Event() + + super(SyncServerThread, self).__init__(args=(self.files_processed,)) self.module = module self.loop = None self.is_running = False @@ -396,6 +400,8 @@ class SyncServerThread(threading.Thread): representation, site, error) + # Trigger files are processed + self.files_processed.set() duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -454,6 +460,7 @@ class SyncServerThread(threading.Thread): async def run_timer(self, delay): """Wait for 'delay' seconds to start next loop""" + self.files_processed.clear() await asyncio.sleep(delay) def reset_timer(self): From af15b0d9415d1bfd2bff978ad81d370484d36bdb Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:00:26 +0100 Subject: [PATCH 167/409] Project setting --- .../hooks/pre_copy_last_published_workfile.py | 119 ++++++++++++------ .../defaults/project_settings/global.json | 3 +- .../schemas/schema_global_tools.json | 5 + 3 files changed, 88 insertions(+), 39 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 004f9d25e7..312548d2db 100644 --- 
a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -7,8 +7,10 @@ from openpype.client.entities import ( get_subsets, ) from openpype.lib import PreLaunchHook +from openpype.lib.profiles_filtering import filter_profiles from openpype.modules.base import ModulesManager from openpype.pipeline.load.utils import get_representation_path +from openpype.settings.lib import get_project_settings class CopyLastPublishedWorkfile(PreLaunchHook): @@ -32,9 +34,45 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ - # TODO setting + project_name = self.data["project_name"] + task_name = self.data["task_name"] + task_type = self.data["task_type"] + host_name = self.application.host_name + + # Check settings has enabled it + project_settings = get_project_settings(project_name) + profiles = project_settings["global"]["tools"]["Workfiles"][ + "last_workfile_on_startup" + ] + filter_data = { + "tasks": task_name, + "task_types": task_type, + "hosts": host_name, + } + last_workfile_settings = filter_profiles(profiles, filter_data) + use_last_published_workfile = last_workfile_settings.get( + "use_last_published_workfile" + ) + if use_last_published_workfile is None: + self.log.info( + ( + "Seems like old version of settings is used." + ' Can\'t access custom templates in host "{}".' + ).format(host_name) + ) + return + elif use_last_published_workfile is False: + self.log.info( + ( + 'Project "{}" has turned off to use last published workfile' + ' as first workfile for host "{}"' + ).format(project_name, host_name) + ) + return + self.log.info("Trying to fetch last published workfile...") + # Check there is no workfile available last_workfile = self.data.get("last_workfile_path") if os.path.exists(last_workfile): self.log.debug( @@ -44,9 +82,6 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return - project_name = self.data["project_name"] - task_name = self.data["task_name"] - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") @@ -65,6 +100,9 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) if not subset_id: + self.log.debug('No any workfile for asset "{}".').format( + asset_doc["name"] + ) return # Get workfile representation @@ -84,41 +122,46 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) - if workfile_representation: # TODO add setting - # Get sync server from Tray, which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + if not workfile_representation: + self.log.debug( + 'No published workfile for task "{}" and host "{}".' 
+ ).format(task_name, host_name) + return - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, - ) - sync_server.reset_timer() + # Get sync server from Tray, which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) - # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots - ) - local_workfile_dir = os.path.dirname(last_workfile) + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() - # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir - ) + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) + + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9c3f2f1e1b..7daa4afa79 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -458,7 +458,8 @@ "hosts": [], "task_types": [], "tasks": [], - "enabled": true + "enabled": true, + "use_last_published_workfile": false } ], "open_workfile_tool_on_startup": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json index ba446135e2..962008d476 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_tools.json @@ -149,6 +149,11 @@ "type": "boolean", "key": "enabled", "label": "Enabled" + }, + { + "type": "boolean", + "key": "use_last_published_workfile", + "label": "Use last published workfile" } ] } From 7a7c91c418f1084dacd25e6aa453e0c70caf9fcd Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:10:12 +0100 Subject: [PATCH 168/409] docstring --- openpype/hooks/pre_copy_last_published_workfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 312548d2db..b1b2fe2366 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -30,6 +30,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): 1- Check if setting for this feature is enabled 2- Check if workfile in work area doesn't exist 3- Check if published workfile exists and is copied locally in publish + 4- Substitute copied published workfile as first workfile Returns: None: This is a void method. 
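The hook built up in the preceding commits gates its behaviour on the "last_workfile_on_startup" profiles extended in project_settings/global.json: the profile matching the current task name, task type and host decides whether "use_last_published_workfile" takes effect. Below is a minimal standalone sketch of that lookup for readers following the series; it reuses only calls visible in the diffs above (get_project_settings, filter_profiles) and the settings keys introduced here, while the wrapper function and the missing-profile guard are illustrative assumptions rather than code from these patches.

from openpype.lib.profiles_filtering import filter_profiles
from openpype.settings.lib import get_project_settings


def resolve_use_last_published_workfile(
    project_name, host_name, task_name, task_type
):
    """Sketch of the profile lookup done by CopyLastPublishedWorkfile."""
    profiles = get_project_settings(project_name)["global"]["tools"][
        "Workfiles"
    ]["last_workfile_on_startup"]
    profile = filter_profiles(profiles, {
        "tasks": task_name,
        "task_types": task_type,
        "hosts": host_name,
    })
    if not profile:
        # No matching profile (e.g. older settings) -> behave as if disabled.
        return None
    return profile.get("use_last_published_workfile")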
From e24489c463af8ce3a83807df69af984357363bfb Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:14:58 +0100 Subject: [PATCH 169/409] comment length --- openpype/hooks/pre_copy_last_published_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index b1b2fe2366..d342151823 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -129,7 +129,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # Get sync server from Tray, which handles the asynchronous thread instance + # Get sync server from Tray, + # which handles the asynchronous thread instance sync_server = next( ( t["sync_server"] From 17853d0b3b55658310bef044eb65bed19d533bed Mon Sep 17 00:00:00 2001 From: Felix David Date: Tue, 1 Nov 2022 10:50:30 +0100 Subject: [PATCH 170/409] lint --- openpype/hooks/pre_copy_last_published_workfile.py | 4 ++-- openpype/modules/sync_server/sync_server.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index d342151823..cf4edeac9b 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -65,8 +65,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): elif use_last_published_workfile is False: self.log.info( ( - 'Project "{}" has turned off to use last published workfile' - ' as first workfile for host "{}"' + 'Project "{}" has turned off to use last published' + ' workfile as first workfile for host "{}"' ).format(project_name, host_name) ) return diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index def9e6cfd8..353b39c4e1 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -239,7 +239,7 @@ class SyncServerThread(threading.Thread): # Event to trigger files have been processed self.files_processed = threading.Event() - + super(SyncServerThread, self).__init__(args=(self.files_processed,)) self.module = module self.loop = None From 881bcebd1dbec626d1b1e48ebf079746ad567b0c Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 11:41:59 +0100 Subject: [PATCH 171/409] requested cosmetic changes --- .../hooks/pre_copy_last_published_workfile.py | 172 +++++++++--------- 1 file changed, 90 insertions(+), 82 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index cf4edeac9b..7a835507f7 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -35,6 +35,17 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ + # Check there is no workfile available + last_workfile = self.data.get("last_workfile_path") + if os.path.exists(last_workfile): + self.log.debug( + "Last workfile exists. 
Skipping {} process.".format( + self.__class__.__name__ + ) + ) + return + + # Get data project_name = self.data["project_name"] task_name = self.data["task_name"] task_type = self.data["task_type"] @@ -73,97 +84,94 @@ class CopyLastPublishedWorkfile(PreLaunchHook): self.log.info("Trying to fetch last published workfile...") - # Check there is no workfile available - last_workfile = self.data.get("last_workfile_path") - if os.path.exists(last_workfile): - self.log.debug( - "Last workfile exists. Skipping {} process.".format( - self.__class__.__name__ - ) - ) - return - project_doc = self.data.get("project_doc") asset_doc = self.data.get("asset_doc") anatomy = self.data.get("anatomy") - if project_doc and asset_doc: - # Get subset id - subset_id = next( - ( - subset["_id"] - for subset in get_subsets( - project_name, - asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family"], - ) - if subset["data"]["family"] == "workfile" - ), - None, - ) - if not subset_id: - self.log.debug('No any workfile for asset "{}".').format( - asset_doc["name"] - ) - return - # Get workfile representation - workfile_representation = next( - ( - representation - for representation in get_representations( - project_name, - version_ids=[ + # Check it can proceed + if not project_doc and not asset_doc: + return + + # Get subset id + subset_id = next( + ( + subset["_id"] + for subset in get_subsets( + project_name, + asset_ids=[asset_doc["_id"]], + fields=["_id", "data.family"], + ) + if subset["data"]["family"] == "workfile" + ), + None, + ) + if not subset_id: + self.log.debug('No any workfile for asset "{}".').format( + asset_doc["name"] + ) + return + + # Get workfile representation + workfile_representation = next( + ( + representation + for representation in get_representations( + project_name, + version_ids=[ + ( get_last_version_by_subset_id( project_name, subset_id, fields=["_id"] - )["_id"] - ], - ) - if representation["context"]["task"]["name"] == task_name - ), - None, - ) + ) + or {} + ).get("_id") + ], + ) + if representation["context"]["task"]["name"] == task_name + ), + None, + ) - if not workfile_representation: - self.log.debug( - 'No published workfile for task "{}" and host "{}".' - ).format(task_name, host_name) - return + if not workfile_representation: + self.log.debug( + 'No published workfile for task "{}" and host "{}".' 
+ ).format(task_name, host_name) + return - # Get sync server from Tray, - # which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if t["sync_server"].sync_server_thread - ), - None, - ) + # Get sync server from Tray, + # which handles the asynchronous thread instance + sync_server = next( + ( + t["sync_server"] + for t in [ + obj + for obj in gc.get_objects() + if isinstance(obj, ModulesManager) + ] + if t["sync_server"].sync_server_thread + ), + None, + ) - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, - ) - sync_server.reset_timer() + # Add site and reset timer + active_site = sync_server.get_active_site(project_name) + sync_server.add_site( + project_name, + workfile_representation["_id"], + active_site, + force=True, + ) + sync_server.reset_timer() - # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + # Wait for the download loop to end + sync_server.sync_server_thread.files_processed.wait() - # Get paths - published_workfile_path = get_representation_path( - workfile_representation, root=anatomy.roots - ) - local_workfile_dir = os.path.dirname(last_workfile) + # Get paths + published_workfile_path = get_representation_path( + workfile_representation, root=anatomy.roots + ) + local_workfile_dir = os.path.dirname(last_workfile) - # Copy file and substitute path - self.data["last_workfile_path"] = shutil.copy( - published_workfile_path, local_workfile_dir - ) + # Copy file and substitute path + self.data["last_workfile_path"] = shutil.copy( + published_workfile_path, local_workfile_dir + ) From 9e01c5deaa1615316b82d6123df8ffa1101a15ec Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 18:33:54 +0100 Subject: [PATCH 172/409] Change to REST API using web server --- .../hooks/pre_copy_last_published_workfile.py | 54 ++++++++------- openpype/modules/sync_server/rest_api.py | 68 +++++++++++++++++++ openpype/modules/sync_server/sync_server.py | 12 ++-- .../modules/sync_server/sync_server_module.py | 9 +++ openpype/modules/timers_manager/rest_api.py | 2 +- 5 files changed, 112 insertions(+), 33 deletions(-) create mode 100644 openpype/modules/sync_server/rest_api.py diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 7a835507f7..cefc7e5d40 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -1,14 +1,14 @@ -import gc import os import shutil +from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, get_representations, get_subsets, ) from openpype.lib import PreLaunchHook +from openpype.lib.local_settings import get_local_site_id from openpype.lib.profiles_filtering import filter_profiles -from openpype.modules.base import ModulesManager from openpype.pipeline.load.utils import get_representation_path from openpype.settings.lib import get_project_settings @@ -137,33 +137,37 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # Get sync server from Tray, - # which handles the asynchronous thread instance - sync_server = next( - ( - t["sync_server"] - for t in [ - obj - for obj in gc.get_objects() - if isinstance(obj, ModulesManager) - ] - if 
t["sync_server"].sync_server_thread - ), - None, - ) + # POST to webserver sites to add to representations + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + self.log.warning("Couldn't find webserver url") + return - # Add site and reset timer - active_site = sync_server.get_active_site(project_name) - sync_server.add_site( - project_name, - workfile_representation["_id"], - active_site, - force=True, + entry_point_url = "{}/sync_server".format(webserver_url) + rest_api_url = "{}/add_sites_to_representations".format( + entry_point_url + ) + try: + import requests + except Exception: + self.log.warning( + "Couldn't add sites to representations ('requests' is not available)" + ) + return + + requests.post( + rest_api_url, + json={ + "project_name": project_name, + "sites": [get_local_site_id()], + "representations": [str(workfile_representation["_id"])], + }, ) - sync_server.reset_timer() # Wait for the download loop to end - sync_server.sync_server_thread.files_processed.wait() + rest_api_url = "{}/files_are_processed".format(entry_point_url) + while requests.get(rest_api_url).content: + sleep(5) # Get paths published_workfile_path = get_representation_path( diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py new file mode 100644 index 0000000000..b7c5d26d15 --- /dev/null +++ b/openpype/modules/sync_server/rest_api.py @@ -0,0 +1,68 @@ +from aiohttp.web_response import Response +from openpype.lib import Logger + + +class SyncServerModuleRestApi: + """ + REST API endpoint used for calling from hosts when context change + happens in Workfile app. + """ + + def __init__(self, user_module, server_manager): + self._log = None + self.module = user_module + self.server_manager = server_manager + + self.prefix = "/sync_server" + + self.register() + + @property + def log(self): + if self._log is None: + self._log = Logger.get_logger(self.__class__.__name__) + return self._log + + def register(self): + self.server_manager.add_route( + "POST", + self.prefix + "/add_sites_to_representations", + self.add_sites_to_representations, + ) + self.server_manager.add_route( + "GET", + self.prefix + "/files_are_processed", + self.files_are_processed, + ) + + async def add_sites_to_representations(self, request): + # Extract data from request + data = await request.json() + try: + project_name = data["project_name"] + sites = data["sites"] + representations = data["representations"] + except KeyError: + msg = ( + "Payload must contain fields 'project_name," + " 'sites' (list of names) and 'representations' (list of IDs)" + ) + self.log.error(msg) + return Response(status=400, message=msg) + + # Add all sites to each representation + for representation_id in representations: + for site in sites: + self.module.add_site( + project_name, representation_id, site, force=True + ) + + # Force timer to run immediately + self.module.reset_timer() + + return Response(status=200) + + async def files_are_processed(self, _request): + return Response( + body=bytes(self.module.sync_server_thread.files_are_processed) + ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 353b39c4e1..7fd2311c2d 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -237,15 +237,13 @@ class SyncServerThread(threading.Thread): def __init__(self, module): self.log = Logger.get_logger(self.__class__.__name__) - # Event to trigger files have been processed - 
self.files_processed = threading.Event() - - super(SyncServerThread, self).__init__(args=(self.files_processed,)) + super(SyncServerThread, self).__init__() self.module = module self.loop = None self.is_running = False self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) self.timer = None + self.files_are_processed = False def run(self): self.is_running = True @@ -400,8 +398,8 @@ class SyncServerThread(threading.Thread): representation, site, error) - # Trigger files are processed - self.files_processed.set() + # Trigger files process finished + self.files_are_processed = False duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -460,7 +458,6 @@ class SyncServerThread(threading.Thread): async def run_timer(self, delay): """Wait for 'delay' seconds to start next loop""" - self.files_processed.clear() await asyncio.sleep(delay) def reset_timer(self): @@ -469,6 +466,7 @@ class SyncServerThread(threading.Thread): if self.timer: self.timer.cancel() self.timer = None + self.files_are_processed = True def _working_sites(self, project_name): if self.module.is_project_paused(project_name): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index e84c333a58..bff999723b 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -2089,6 +2089,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def cli(self, click_group): click_group.add_command(cli_main) + # Webserver module implementation + def webserver_initialization(self, server_manager): + """Add routes for syncs.""" + if self.tray_initialized: + from .rest_api import SyncServerModuleRestApi + self.rest_api_obj = SyncServerModuleRestApi( + self, server_manager + ) + @click.group(SyncServerModule.name, help="SyncServer module related commands.") def cli_main(): diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py index 4a2e9e6575..979db9075b 100644 --- a/openpype/modules/timers_manager/rest_api.py +++ b/openpype/modules/timers_manager/rest_api.py @@ -21,7 +21,7 @@ class TimersManagerModuleRestApi: @property def log(self): if self._log is None: - self._log = Logger.get_logger(self.__ckass__.__name__) + self._log = Logger.get_logger(self.__class__.__name__) return self._log def register(self): From 8d9542bb45088fec5c800fe7b7d9b76f5ca3c14c Mon Sep 17 00:00:00 2001 From: Felix David Date: Thu, 3 Nov 2022 18:37:13 +0100 Subject: [PATCH 173/409] lint --- openpype/hooks/pre_copy_last_published_workfile.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index cefc7e5d40..6bec4f7d2c 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -151,7 +151,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): import requests except Exception: self.log.warning( - "Couldn't add sites to representations ('requests' is not available)" + "Couldn't add sites to representations " + "('requests' is not available)" ) return From 5e02d7d2d71796b6826e320fd8cfbc3e77980d93 Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 4 Nov 2022 10:06:59 +0100 Subject: [PATCH 174/409] legacy compatibility --- openpype/hooks/pre_copy_last_published_workfile.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git 
a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 6bec4f7d2c..f3293fa511 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -99,9 +99,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): for subset in get_subsets( project_name, asset_ids=[asset_doc["_id"]], - fields=["_id", "data.family"], + fields=["_id", "data.family", "data.families"], ) - if subset["data"]["family"] == "workfile" + if subset["data"].get("family") == "workfile" + # Legacy compatibility + or "workfile" in subset["data"].get("families", {}) ), None, ) From 1c48c0936290cbd7013df50d83392f53a68d51dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 16:41:19 +0100 Subject: [PATCH 175/409] use 'created_dt' of representation --- openpype/client/entities.py | 28 +++++++++++++++++++ .../hooks/pre_copy_last_published_workfile.py | 19 +++++++++++-- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43afccf2f1..43c2874f57 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,6 +6,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. """ +from datetime import datetime import re import collections @@ -1367,6 +1368,33 @@ def get_representation_parents(project_name, representation): return parents_by_repre_id[repre_id] +def get_representation_last_created_time_on_site( + representation: dict, site_name: str +) -> datetime: + """Get `created_dt` value for representation on site. + + Args: + representation (dict): Representation to get creation date of + site_name (str): Site from which to get the creation date + + Returns: + datetime: Created time of representation on site + """ + created_time = next( + ( + site.get("created_dt") + for site in representation["files"][0].get("sites", []) + if site["name"] == site_name + ), + None, + ) + if created_time: + return created_time + else: + # Use epoch as 'zero' time + return datetime.utcfromtimestamp(0) + + def get_thumbnail_id_from_source(project_name, src_type, src_id): """Receive thumbnail id from source entity. 
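The helper added above resolves to a site's stored created_dt when the file is already synced there, and falls back to the Unix epoch when the site entry is missing or has no created_dt yet; the hook changes in the next diff rely on that to poll until the local site reports a newer timestamp. A short usage sketch with an assumed representation shape (site names and timestamps are placeholders, real documents are maintained by the sync server):

from datetime import datetime

from openpype.client.entities import (
    get_representation_last_created_time_on_site,
)

# Assumed document shape; only the first file's "sites" list is inspected.
representation = {
    "files": [{
        "sites": [
            {"name": "studio", "created_dt": datetime(2022, 11, 8, 15, 30)},
            {"name": "my-local-site"},  # queued for download, no timestamp yet
        ]
    }]
}

# Synced site -> its stored timestamp is returned.
print(get_representation_last_created_time_on_site(representation, "studio"))

# Unsynced or unknown site -> epoch, so any finished download compares as newer.
print(get_representation_last_created_time_on_site(representation, "my-local-site"))

Later commits in the series drop this polling on created_dt again in favour of asking the sync server module directly, so the helper is eventually removed.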
diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index f3293fa511..4eb66f6f85 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -3,6 +3,8 @@ import shutil from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, + get_representation_by_id, + get_representation_last_created_time_on_site, get_representations, get_subsets, ) @@ -158,18 +160,29 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ) return + local_site_id = get_local_site_id() requests.post( rest_api_url, json={ "project_name": project_name, - "sites": [get_local_site_id()], + "sites": [local_site_id], "representations": [str(workfile_representation["_id"])], }, ) # Wait for the download loop to end - rest_api_url = "{}/files_are_processed".format(entry_point_url) - while requests.get(rest_api_url).content: + last_created_time = get_representation_last_created_time_on_site( + workfile_representation, local_site_id + ) + while ( + last_created_time + >= get_representation_last_created_time_on_site( + get_representation_by_id( + project_name, workfile_representation["_id"] + ), + local_site_id, + ) + ): sleep(5) # Get paths From a614f0f805acd6d73c57dc68bc00a9d7834714cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:57:02 +0100 Subject: [PATCH 176/409] add priority to add_site --- openpype/modules/sync_server/rest_api.py | 17 ++++++----------- openpype/modules/sync_server/sync_server.py | 4 ---- .../modules/sync_server/sync_server_module.py | 13 ++++++++++--- 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index b7c5d26d15..e92ddc8eee 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -1,4 +1,5 @@ from aiohttp.web_response import Response +from openpype.client.entities import get_representation_by_id from openpype.lib import Logger @@ -29,11 +30,6 @@ class SyncServerModuleRestApi: self.prefix + "/add_sites_to_representations", self.add_sites_to_representations, ) - self.server_manager.add_route( - "GET", - self.prefix + "/files_are_processed", - self.files_are_processed, - ) async def add_sites_to_representations(self, request): # Extract data from request @@ -54,15 +50,14 @@ class SyncServerModuleRestApi: for representation_id in representations: for site in sites: self.module.add_site( - project_name, representation_id, site, force=True + project_name, + representation_id, + site, + force=True, + priority=99, ) # Force timer to run immediately self.module.reset_timer() return Response(status=200) - - async def files_are_processed(self, _request): - return Response( - body=bytes(self.module.sync_server_thread.files_are_processed) - ) diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 7fd2311c2d..d0a40a60ff 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -243,7 +243,6 @@ class SyncServerThread(threading.Thread): self.is_running = False self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) self.timer = None - self.files_are_processed = False def run(self): self.is_running = True @@ -398,8 +397,6 @@ class SyncServerThread(threading.Thread): representation, site, error) - # Trigger files process finished - self.files_are_processed = False 
duration = time.time() - start_time self.log.debug("One loop took {:.2f}s".format(duration)) @@ -466,7 +463,6 @@ class SyncServerThread(threading.Thread): if self.timer: self.timer.cancel() self.timer = None - self.files_are_processed = True def _working_sites(self, project_name): if self.module.is_project_paused(project_name): diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index bff999723b..6a1fc9a1c5 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -136,7 +136,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, project_name, representation_id, site_name=None, - force=False): + force=False, priority=None): """ Adds new site to representation to be synced. @@ -152,6 +152,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): representation_id (string): MongoDB _id value site_name (string): name of configured and active site force (bool): reset site if exists + priority (int): set priority Throws: SiteAlreadyPresentError - if adding already existing site and @@ -167,7 +168,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.reset_site_on_representation(project_name, representation_id, site_name=site_name, - force=force) + force=force, + priority=priority) def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): @@ -1655,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False): + remove=False, pause=None, force=False, priority=None): """ Reset information about synchronization for particular 'file_id' and provider. 
@@ -1678,6 +1680,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): remove (bool): if True remove site altogether pause (bool or None): if True - pause, False - unpause force (bool): hard reset - currently only for add_site + priority (int): set priority Raises: SiteAlreadyPresentError - if adding already existing site and @@ -1705,6 +1708,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule): elem = {"name": site_name} + # Add priority + if priority: + elem["priority"] = priority + if file_id: # reset site for particular file self._reset_site_for_file(project_name, representation_id, elem, file_id, site_name) From b6365d85404b88dddcb218969f0a7e30e4668e08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:58:19 +0100 Subject: [PATCH 177/409] clean --- openpype/modules/sync_server/rest_api.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index e92ddc8eee..0c3b914833 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -1,5 +1,4 @@ from aiohttp.web_response import Response -from openpype.client.entities import get_representation_by_id from openpype.lib import Logger From 138051f2f4c9d0c705eeb1cd299166d0ca249850 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 18:59:03 +0100 Subject: [PATCH 178/409] clean --- openpype/modules/sync_server/sync_server_module.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 6a1fc9a1c5..951cb116fc 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1657,7 +1657,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, priority=None): + remove=False, pause=None, force=False, + priority=None): """ Reset information about synchronization for particular 'file_id' and provider. 
From 3a5ebc6ea29fd4ec34b0fc80c27f5cc187ace8e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 19:02:29 +0100 Subject: [PATCH 179/409] sort fields --- openpype/hooks/pre_copy_last_published_workfile.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 4eb66f6f85..acbc9ec1c7 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -178,7 +178,9 @@ class CopyLastPublishedWorkfile(PreLaunchHook): last_created_time >= get_representation_last_created_time_on_site( get_representation_by_id( - project_name, workfile_representation["_id"] + project_name, + workfile_representation["_id"], + fields=["files"], ), local_site_id, ) From 29f0dee272c9b0b27c4a6e4098caab2ed11a1d7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 8 Nov 2022 19:04:03 +0100 Subject: [PATCH 180/409] clean --- openpype/modules/sync_server/sync_server_module.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 951cb116fc..1292bed9af 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1657,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): def reset_site_on_representation(self, project_name, representation_id, side=None, file_id=None, site_name=None, - remove=False, pause=None, force=False, + remove=False, pause=None, force=False, priority=None): """ Reset information about synchronization for particular 'file_id' From a600cf4dcad5f8caa08187ca2d449bbf9986623a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:18:23 +0100 Subject: [PATCH 181/409] fix last version check --- .../hooks/pre_copy_last_published_workfile.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index acbc9ec1c7..96b5ccadb2 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -116,19 +116,19 @@ class CopyLastPublishedWorkfile(PreLaunchHook): return # Get workfile representation + last_version_doc = get_last_version_by_subset_id( + project_name, subset_id, fields=["_id"] + ) + if not last_version_doc: + self.log.debug("Subset does not have any versions") + return + workfile_representation = next( ( representation for representation in get_representations( project_name, - version_ids=[ - ( - get_last_version_by_subset_id( - project_name, subset_id, fields=["_id"] - ) - or {} - ).get("_id") - ], + version_ids=[last_version_doc["_id"]] ) if representation["context"]["task"]["name"] == task_name ), From 7596610c160cf83b5dccb00c1638756312a54cf1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:03 +0100 Subject: [PATCH 182/409] replaced 'add_sites_to_representations' with 'reset_timer' in rest api --- openpype/modules/sync_server/rest_api.py | 31 +++--------------------- 1 file changed, 3 insertions(+), 28 deletions(-) diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index 0c3b914833..51769cd4fb 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -26,36 +26,11 @@ 
class SyncServerModuleRestApi: def register(self): self.server_manager.add_route( "POST", - self.prefix + "/add_sites_to_representations", - self.add_sites_to_representations, + self.prefix + "/reset_timer", + self.reset_timer, ) - async def add_sites_to_representations(self, request): - # Extract data from request - data = await request.json() - try: - project_name = data["project_name"] - sites = data["sites"] - representations = data["representations"] - except KeyError: - msg = ( - "Payload must contain fields 'project_name," - " 'sites' (list of names) and 'representations' (list of IDs)" - ) - self.log.error(msg) - return Response(status=400, message=msg) - - # Add all sites to each representation - for representation_id in representations: - for site in sites: - self.module.add_site( - project_name, - representation_id, - site, - force=True, - priority=99, - ) - + async def reset_timer(self, request): # Force timer to run immediately self.module.reset_timer() From 2052afc76a72d5845570e4900e527cabe1d1ecb1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:15 +0100 Subject: [PATCH 183/409] added ability to rese timer from add_site --- openpype/modules/sync_server/sync_server_module.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 1292bed9af..5e19a6fce0 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -136,7 +136,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): """ Start of Public API """ def add_site(self, project_name, representation_id, site_name=None, - force=False, priority=None): + force=False, priority=None, reset_timer=False): """ Adds new site to representation to be synced. @@ -171,6 +171,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule): force=force, priority=priority) + if reset_timer: + self.reset_timer() + def remove_site(self, project_name, representation_id, site_name, remove_local_files=False): """ From f7c1fa01ae1b358b8af45ab777ebda4a6ba81bfc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:19:32 +0100 Subject: [PATCH 184/409] 'reset_timer' can reset timer via rest api endpoint --- .../modules/sync_server/sync_server_module.py | 37 ++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 5e19a6fce0..b505e25d2f 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -916,7 +916,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule): In case of user's involvement (reset site), start that right away. 
""" - self.sync_server_thread.reset_timer() + + if not self.enabled: + return + + if self.sync_server_thread is None: + self._reset_timer_with_rest_api() + else: + self.sync_server_thread.reset_timer() + + def is_representaion_on_site( + self, project_name, representation_id, site_id + ): + # TODO implement + return False + + def _reset_timer_with_rest_api(self): + # POST to webserver sites to add to representations + webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") + if not webserver_url: + self.log.warning("Couldn't find webserver url") + return + + rest_api_url = "{}/sync_server/reset_timer".format( + webserver_url + ) + + try: + import requests + except Exception: + self.log.warning( + "Couldn't add sites to representations " + "('requests' is not available)" + ) + return + + requests.post(rest_api_url) def get_enabled_projects(self): """Returns list of projects which have SyncServer enabled.""" From 1d028d22ba4b796b6e8fe700b70f2f2e87217edb Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:20:21 +0100 Subject: [PATCH 185/409] updated prelaunch hook with new abilities of sync server --- .../hooks/pre_copy_last_published_workfile.py | 52 +++++-------------- 1 file changed, 12 insertions(+), 40 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 96b5ccadb2..6fd50a64d6 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -141,49 +141,21 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ).format(task_name, host_name) return - # POST to webserver sites to add to representations - webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL") - if not webserver_url: - self.log.warning("Couldn't find webserver url") - return - - entry_point_url = "{}/sync_server".format(webserver_url) - rest_api_url = "{}/add_sites_to_representations".format( - entry_point_url - ) - try: - import requests - except Exception: - self.log.warning( - "Couldn't add sites to representations " - "('requests' is not available)" - ) - return - local_site_id = get_local_site_id() - requests.post( - rest_api_url, - json={ - "project_name": project_name, - "sites": [local_site_id], - "representations": [str(workfile_representation["_id"])], - }, + sync_server = self.modules_manager.get("sync_server") + sync_server.add_site( + project_name, + workfile_representation["_id"], + local_site_id, + force=True, + priority=99, + reset_timer=True ) - # Wait for the download loop to end - last_created_time = get_representation_last_created_time_on_site( - workfile_representation, local_site_id - ) - while ( - last_created_time - >= get_representation_last_created_time_on_site( - get_representation_by_id( - project_name, - workfile_representation["_id"], - fields=["files"], - ), - local_site_id, - ) + while not sync_server.is_representaion_on_site( + project_name, + workfile_representation["_id"], + local_site_id ): sleep(5) From 5db743080ccb22adc89f76fc86f4ca26020503fd Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:22:56 +0100 Subject: [PATCH 186/409] check if is sync server enabled --- openpype/hooks/pre_copy_last_published_workfile.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 6fd50a64d6..69e3d6efe4 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ 
b/openpype/hooks/pre_copy_last_published_workfile.py @@ -37,6 +37,12 @@ class CopyLastPublishedWorkfile(PreLaunchHook): Returns: None: This is a void method. """ + + sync_server = self.modules_manager.get("sync_server") + if not sync_server or not sync_server.enabled: + self.log.deubg("Sync server module is not enabled or available") + return + # Check there is no workfile available last_workfile = self.data.get("last_workfile_path") if os.path.exists(last_workfile): @@ -142,7 +148,6 @@ class CopyLastPublishedWorkfile(PreLaunchHook): return local_site_id = get_local_site_id() - sync_server = self.modules_manager.get("sync_server") sync_server.add_site( project_name, workfile_representation["_id"], From 75e12954ee51d09916032224f4c72be84c12bacf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:36:03 +0100 Subject: [PATCH 187/409] removed 'get_representation_last_created_time_on_site' function --- openpype/client/entities.py | 27 ------------------- .../hooks/pre_copy_last_published_workfile.py | 2 -- 2 files changed, 29 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43c2874f57..91d4b499b0 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -1368,33 +1368,6 @@ def get_representation_parents(project_name, representation): return parents_by_repre_id[repre_id] -def get_representation_last_created_time_on_site( - representation: dict, site_name: str -) -> datetime: - """Get `created_dt` value for representation on site. - - Args: - representation (dict): Representation to get creation date of - site_name (str): Site from which to get the creation date - - Returns: - datetime: Created time of representation on site - """ - created_time = next( - ( - site.get("created_dt") - for site in representation["files"][0].get("sites", []) - if site["name"] == site_name - ), - None, - ) - if created_time: - return created_time - else: - # Use epoch as 'zero' time - return datetime.utcfromtimestamp(0) - - def get_thumbnail_id_from_source(project_name, src_type, src_id): """Receive thumbnail id from source entity. 
diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 69e3d6efe4..884b0f54b6 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -3,8 +3,6 @@ import shutil from time import sleep from openpype.client.entities import ( get_last_version_by_subset_id, - get_representation_by_id, - get_representation_last_created_time_on_site, get_representations, get_subsets, ) From 99bebd82a7a3d9288799d4771a242a56dd58c40a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 9 Nov 2022 11:51:00 +0100 Subject: [PATCH 188/409] fix typo --- openpype/hooks/pre_copy_last_published_workfile.py | 2 +- openpype/modules/sync_server/sync_server_module.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 884b0f54b6..0e561334e1 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -155,7 +155,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): reset_timer=True ) - while not sync_server.is_representaion_on_site( + while not sync_server.is_representation_on_site( project_name, workfile_representation["_id"], local_site_id diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index b505e25d2f..1f65ea9bda 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -925,7 +925,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): else: self.sync_server_thread.reset_timer() - def is_representaion_on_site( + def is_representation_on_site( self, project_name, representation_id, site_id ): # TODO implement From 44cfbf9f2922c80c21875f6f25658e6041c6b677 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Nov 2022 12:27:07 +0100 Subject: [PATCH 189/409] added method to check if representation has all files on site --- .../modules/sync_server/sync_server_module.py | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 1f65ea9bda..6250146523 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -926,10 +926,27 @@ class SyncServerModule(OpenPypeModule, ITrayModule): self.sync_server_thread.reset_timer() def is_representation_on_site( - self, project_name, representation_id, site_id + self, project_name, representation_id, site_name ): - # TODO implement - return False + """Checks if 'representation_id' has all files avail. 
on 'site_name'""" + representation = get_representation_by_id(project_name, + representation_id, + fields=["_id", "files"]) + if not representation: + return False + + on_site = False + for file_info in representation.get("files", []): + for site in file_info.get("sites", []): + if site["name"] != site_name: + continue + + if (site.get("progress") or site.get("error") or + not site.get("created_dt")): + return False + on_site = True + + return on_site def _reset_timer_with_rest_api(self): # POST to webserver sites to add to representations From 2e6f850b5d9a99d7063d0693414459834a6ba373 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 9 Nov 2022 12:48:52 +0100 Subject: [PATCH 190/409] small updates to docstrings --- openpype/modules/sync_server/sync_server_module.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 6250146523..653ee50541 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -143,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule): 'project_name' must have synchronization enabled (globally or project only) - Used as a API endpoint from outside applications (Loader etc). + Used as an API endpoint from outside applications (Loader etc). Use 'force' to reset existing site. @@ -153,6 +153,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule): site_name (string): name of configured and active site force (bool): reset site if exists priority (int): set priority + reset_timer (bool): if delay timer should be reset, eg. user mark + some representation to be synced manually Throws: SiteAlreadyPresentError - if adding already existing site and @@ -1601,12 +1603,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule): Args: project_name (string): name of project - force to db connection as each file might come from different collection - new_file_id (string): + new_file_id (string): only present if file synced successfully file (dictionary): info about processed file (pulled from DB) representation (dictionary): parent repr of file (from DB) site (string): label ('gdrive', 'S3') error (string): exception message - progress (float): 0-1 of progress of upload/download + progress (float): 0-0.99 of progress of upload/download priority (int): 0-100 set priority Returns: From 5838f0f6097d57201402d9d5d360755f5c54b93c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Wed, 9 Nov 2022 17:09:38 +0100 Subject: [PATCH 191/409] clean --- openpype/client/entities.py | 1 - openpype/modules/sync_server/rest_api.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 91d4b499b0..43afccf2f1 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -6,7 +6,6 @@ that has project name as a context (e.g. on 'ProjectEntity'?). + We will need more specific functions doing wery specific queires really fast. 
""" -from datetime import datetime import re import collections diff --git a/openpype/modules/sync_server/rest_api.py b/openpype/modules/sync_server/rest_api.py index 51769cd4fb..a7d9dd80b7 100644 --- a/openpype/modules/sync_server/rest_api.py +++ b/openpype/modules/sync_server/rest_api.py @@ -30,8 +30,8 @@ class SyncServerModuleRestApi: self.reset_timer, ) - async def reset_timer(self, request): - # Force timer to run immediately + async def reset_timer(self, _request): + """Force timer to run immediately.""" self.module.reset_timer() return Response(status=200) From c096279cfcd9bdf5124a2e444da2830ebf300d56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 15 Nov 2022 09:06:09 +0100 Subject: [PATCH 192/409] logging format --- .../hooks/pre_copy_last_published_workfile.py | 31 ++++++++++--------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 0e561334e1..44144e5fff 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -75,16 +75,20 @@ class CopyLastPublishedWorkfile(PreLaunchHook): self.log.info( ( "Seems like old version of settings is used." - ' Can\'t access custom templates in host "{}".' - ).format(host_name) + ' Can\'t access custom templates in host "{}".'.format( + host_name + ) + ) ) return elif use_last_published_workfile is False: self.log.info( ( 'Project "{}" has turned off to use last published' - ' workfile as first workfile for host "{}"' - ).format(project_name, host_name) + ' workfile as first workfile for host "{}"'.format( + project_name, host_name + ) + ) ) return @@ -114,8 +118,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook): None, ) if not subset_id: - self.log.debug('No any workfile for asset "{}".').format( - asset_doc["name"] + self.log.debug( + 'No any workfile for asset "{}".'.format(asset_doc["name"]) ) return @@ -131,8 +135,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): ( representation for representation in get_representations( - project_name, - version_ids=[last_version_doc["_id"]] + project_name, version_ids=[last_version_doc["_id"]] ) if representation["context"]["task"]["name"] == task_name ), @@ -141,8 +144,10 @@ class CopyLastPublishedWorkfile(PreLaunchHook): if not workfile_representation: self.log.debug( - 'No published workfile for task "{}" and host "{}".' 
- ).format(task_name, host_name) + 'No published workfile for task "{}" and host "{}".'.format( + task_name, host_name + ) + ) return local_site_id = get_local_site_id() @@ -152,13 +157,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook): local_site_id, force=True, priority=99, - reset_timer=True + reset_timer=True, ) while not sync_server.is_representation_on_site( - project_name, - workfile_representation["_id"], - local_site_id + project_name, workfile_representation["_id"], local_site_id ): sleep(5) From 460adc767e6b81e62fdab5d2699f0c10c1023e9a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 19:36:44 +0800 Subject: [PATCH 193/409] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 115 ++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 openpype/hosts/maya/plugins/load/load_abc_to_standin.py diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py new file mode 100644 index 0000000000..defed4bd73 --- /dev/null +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -0,0 +1,115 @@ +import os +import clique + +from openpype.pipeline import ( + load, + get_representation_path +) +from openpype.settings import get_project_settings + + +class AlembicStandinLoader(load.LoaderPlugin): + """Load Alembic as Arnold Standin""" + + families = ["model", "pointcache"] + representations = ["abc"] + + label = "Import Alembic as Standin" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, options): + + import maya.cmds as cmds + import pymel.core as pm + import mtoa.ui.arnoldmenu + from openpype.hosts.maya.api.pipeline import containerise + from openpype.hosts.maya.api.lib import unique_namespace + + version = context["version"] + version_data = version.get("data", {}) + + self.log.info("version_data: {}\n".format(version_data)) + + frameStart = version_data.get("frameStart", None) + + asset = context["asset"]["name"] + namespace = namespace or unique_namespace( + asset + "_", + prefix="_" if asset[0].isdigit() else "", + suffix="_", + ) + + #Root group + label = "{}:{}".format(namespace, name) + root = pm.group(name=label, empty=True) + + settings = get_project_settings(os.environ['AVALON_PROJECT']) + colors = settings["maya"]["load"]["colors"] + + c = colors.get('ass') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) + + transform_name = label + "_ABC" + + standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn()) + standin = standinShape.getParent() + standin.rename(transform_name) + + pm.parent(standin, root) + + # Set the standin filepath + standinShape.dso.set(self.fname) + if frameStart is not None: + standinShape.useFrameExtension.set(1) + + nodes = [root, standin] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, representation): + + import pymel.core as pm + + path = get_representation_path(representation) + + # Update the standin + standins = list() + members = pm.sets(container['objectName'], query=True) + for member in members: + shape = member.getShape() + if (shape and shape.type() == "aiStandIn"): + standins.append(shape) + + for standin in standins: + standin.dso.set(path) + standin.useFrameExtension.set(1) + + container = pm.PyNode(container["objectName"]) + 
container.representation.set(str(representation["_id"])) + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + import maya.cmds as cmds + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass From ecbe06bdc8aeee163072f6173b96ba2886b0ebb1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 31 Oct 2022 19:56:10 +0800 Subject: [PATCH 194/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index defed4bd73..f39aa56650 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -1,5 +1,4 @@ import os -import clique from openpype.pipeline import ( load, @@ -41,7 +40,7 @@ class AlembicStandinLoader(load.LoaderPlugin): suffix="_", ) - #Root group + # Root group label = "{}:{}".format(namespace, name) root = pm.group(name=label, empty=True) From 2e6974b0640d0ad43bbacb2163a1cf85a7933522 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 1 Nov 2022 20:36:58 +0800 Subject: [PATCH 195/409] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index f39aa56650..68aeb24069 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -13,7 +13,7 @@ class AlembicStandinLoader(load.LoaderPlugin): families = ["model", "pointcache"] representations = ["abc"] - label = "Import Alembic as Standin" + label = "Import Alembic as Arnold Standin" order = -5 icon = "code-fork" color = "orange" @@ -21,7 +21,6 @@ class AlembicStandinLoader(load.LoaderPlugin): def load(self, context, name, namespace, options): import maya.cmds as cmds - import pymel.core as pm import mtoa.ui.arnoldmenu from openpype.hosts.maya.api.pipeline import containerise from openpype.hosts.maya.api.lib import unique_namespace @@ -42,7 +41,7 @@ class AlembicStandinLoader(load.LoaderPlugin): # Root group label = "{}:{}".format(namespace, name) - root = pm.group(name=label, empty=True) + root = cmds.group(name=label, empty=True) settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] @@ -55,16 +54,17 @@ class AlembicStandinLoader(load.LoaderPlugin): transform_name = label + "_ABC" - standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn()) - standin = standinShape.getParent() - standin.rename(transform_name) + standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0] + standin = cmds.listRelatives(standinShape, parent=True, typ="transform") + standin = cmds.rename(standin, transform_name) + standinShape = cmds.listRelatives(standin, children=True)[0] - pm.parent(standin, root) + cmds.parent(standin, root) # Set the standin filepath - standinShape.dso.set(self.fname) + cmds.setAttr(standinShape + ".dso", self.fname, type="string") if frameStart is not None: - 
standinShape.useFrameExtension.set(1) + cmds.setAttr(standinShape + ".useFrameExtension", 1) nodes = [root, standin] self[:] = nodes From ce5d4c02fa7f31c7731a04f5580e52f042933353 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 1 Nov 2022 20:38:51 +0800 Subject: [PATCH 196/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 68aeb24069..5d6c52eac9 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -55,7 +55,8 @@ class AlembicStandinLoader(load.LoaderPlugin): transform_name = label + "_ABC" standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0] - standin = cmds.listRelatives(standinShape, parent=True, typ="transform") + standin = cmds.listRelatives(standinShape, parent=True, + typ="transform") standin = cmds.rename(standin, transform_name) standinShape = cmds.listRelatives(standin, children=True)[0] From c58ef40f15c405c615cb5d4ec8a566a00de5c2eb Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 00:32:22 +0800 Subject: [PATCH 197/409] Alembic Loader as Arnold Standin --- .../hosts/maya/plugins/load/load_abc_to_standin.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 5d6c52eac9..94bb974917 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -10,7 +10,7 @@ from openpype.settings import get_project_settings class AlembicStandinLoader(load.LoaderPlugin): """Load Alembic as Arnold Standin""" - families = ["model", "pointcache"] + families = ["animation", "model", "pointcache"] representations = ["abc"] label = "Import Alembic as Arnold Standin" @@ -31,6 +31,7 @@ class AlembicStandinLoader(load.LoaderPlugin): self.log.info("version_data: {}\n".format(version_data)) frameStart = version_data.get("frameStart", None) + frameEnd = version_data.get("frameEnd", None) asset = context["asset"]["name"] namespace = namespace or unique_namespace( @@ -64,7 +65,13 @@ class AlembicStandinLoader(load.LoaderPlugin): # Set the standin filepath cmds.setAttr(standinShape + ".dso", self.fname, type="string") - if frameStart is not None: + cmds.setAttr(standinShape + ".abcFPS", 25) + + if frameStart is None: + cmds.setAttr(standinShape + ".useFrameExtension", 0) + elif frameStart == 1 and frameEnd == 1: + cmds.setAttr(standinShape + ".useFrameExtension", 0) + else: cmds.setAttr(standinShape + ".useFrameExtension", 1) nodes = [root, standin] @@ -93,7 +100,8 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) - standin.useFrameExtension.set(1) + standin.useFrameExtension.set(0) + standin.abcFPS.set(25) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 031465779bc4096a2848a545b52c37a44a010128 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 9 Nov 2022 08:39:52 +0800 Subject: [PATCH 198/409] Alembic Loader as Arnold Standin --- .../hosts/maya/plugins/load/load_abc_to_standin.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py 
b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 94bb974917..19e60d33da 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -1,6 +1,7 @@ import os from openpype.pipeline import ( + legacy_io, load, get_representation_path ) @@ -46,6 +47,7 @@ class AlembicStandinLoader(load.LoaderPlugin): settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] + fps = legacy_io.Session["AVALON_FPS"] c = colors.get('ass') if c is not None: @@ -65,12 +67,14 @@ class AlembicStandinLoader(load.LoaderPlugin): # Set the standin filepath cmds.setAttr(standinShape + ".dso", self.fname, type="string") - cmds.setAttr(standinShape + ".abcFPS", 25) + cmds.setAttr(standinShape + ".abcFPS", float(fps)) if frameStart is None: cmds.setAttr(standinShape + ".useFrameExtension", 0) + elif frameStart == 1 and frameEnd == 1: cmds.setAttr(standinShape + ".useFrameExtension", 0) + else: cmds.setAttr(standinShape + ".useFrameExtension", 1) @@ -89,7 +93,7 @@ class AlembicStandinLoader(load.LoaderPlugin): import pymel.core as pm path = get_representation_path(representation) - + fps = legacy_io.Session["AVALON_FPS"] # Update the standin standins = list() members = pm.sets(container['objectName'], query=True) @@ -101,7 +105,7 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin in standins: standin.dso.set(path) standin.useFrameExtension.set(0) - standin.abcFPS.set(25) + standin.abcFPS.set(float(fps)) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 6e94a81393884cab2c3b2798e2c765c08617c4d1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:09:13 +0800 Subject: [PATCH 199/409] Alembic Loader as Arnold Standin --- .../maya/plugins/load/load_abc_to_standin.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 19e60d33da..a192d9c357 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -28,11 +28,10 @@ class AlembicStandinLoader(load.LoaderPlugin): version = context["version"] version_data = version.get("data", {}) - + family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) - + self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) - frameEnd = version_data.get("frameEnd", None) asset = context["asset"]["name"] namespace = namespace or unique_namespace( @@ -48,12 +47,14 @@ class AlembicStandinLoader(load.LoaderPlugin): settings = get_project_settings(os.environ['AVALON_PROJECT']) colors = settings["maya"]["load"]["colors"] fps = legacy_io.Session["AVALON_FPS"] - - c = colors.get('ass') + c = colors.get(family[0]) if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - c[0], c[1], c[2]) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" @@ -72,7 +73,7 @@ class AlembicStandinLoader(load.LoaderPlugin): if frameStart is None: cmds.setAttr(standinShape + ".useFrameExtension", 0) - elif frameStart == 1 and frameEnd == 1: + elif "model" in family: cmds.setAttr(standinShape + ".useFrameExtension", 0) else: From 66608300969101ee75cf68a8c7a86dfd7b7710d4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:10:41 
+0800 Subject: [PATCH 200/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index a192d9c357..8ce1aee3ac 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -30,7 +30,6 @@ class AlembicStandinLoader(load.LoaderPlugin): version_data = version.get("data", {}) family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) - self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) asset = context["asset"]["name"] @@ -51,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From 7c3f625fe324328ed7e7a93e4adff4da7c1d6e8e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:11:56 +0800 Subject: [PATCH 201/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 8ce1aee3ac..d93c85f8a4 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From bee7b17ff2b612fab87c95cdfa4143659453d049 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:13:04 +0800 Subject: [PATCH 202/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index d93c85f8a4..dafe999d9d 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,9 +50,9 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) ) transform_name = label + "_ABC" From 3649ee7e4e163f45472c13f5f4bb74a65175e979 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:15:11 +0800 Subject: [PATCH 203/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index dafe999d9d..d93c85f8a4 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py 
@@ -50,9 +50,9 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) ) transform_name = label + "_ABC" From cd27df0e8d35ccb94a4cfde09f399cbb6319a1c4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:16:10 +0800 Subject: [PATCH 204/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index d93c85f8a4..8ce1aee3ac 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -50,10 +50,10 @@ class AlembicStandinLoader(load.LoaderPlugin): if c is not None: cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + (float(c[0])/255), + (float(c[1])/255), + (float(c[2])/255) + ) transform_name = label + "_ABC" From 3186acc83e967f726ceec6c975fc74d6ea6cd8a2 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 19:19:11 +0800 Subject: [PATCH 205/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 8ce1aee3ac..9583063c7e 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -30,6 +30,7 @@ class AlembicStandinLoader(load.LoaderPlugin): version_data = version.get("data", {}) family = version["data"]["families"] self.log.info("version_data: {}\n".format(version_data)) + self.log.info("family: {}\n".format(family)) frameStart = version_data.get("frameStart", None) asset = context["asset"]["name"] @@ -48,12 +49,12 @@ class AlembicStandinLoader(load.LoaderPlugin): fps = legacy_io.Session["AVALON_FPS"] c = colors.get(family[0]) if c is not None: + r = (float(c[0]) / 255) + g = (float(c[1]) / 255) + b = (float(c[2]) / 255) cmds.setAttr(root + ".useOutlinerColor", 1) cmds.setAttr(root + ".outlinerColor", - (float(c[0])/255), - (float(c[1])/255), - (float(c[2])/255) - ) + r, g, b) transform_name = label + "_ABC" From cf8bd8eb59590df3b2a196d68bbb47e29fcd862f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 17:57:32 +0800 Subject: [PATCH 206/409] Alembic Loader as Arnold Standin --- openpype/hosts/maya/plugins/load/load_abc_to_standin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 9583063c7e..605a492e4d 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -98,6 +98,7 @@ class AlembicStandinLoader(load.LoaderPlugin): # Update the standin standins = list() members = pm.sets(container['objectName'], query=True) + self.log.info("container:{}".format(container)) for member in members: shape = member.getShape() if (shape and shape.type() == "aiStandIn"): @@ -105,8 +106,11 @@ class AlembicStandinLoader(load.LoaderPlugin): for standin 
in standins: standin.dso.set(path) - standin.useFrameExtension.set(0) standin.abcFPS.set(float(fps)) + if "modelMain" in container['objectName']: + standin.useFrameExtension.set(0) + else: + standin.useFrameExtension.set(1) container = pm.PyNode(container["objectName"]) container.representation.set(str(representation["_id"])) From 93bff0c038c262290c5d8e0b5e28847c3a210777 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 16 Nov 2022 03:40:10 +0000 Subject: [PATCH 207/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 1953d0d6a5..268f33083a 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.4" +__version__ = "3.14.7-nightly.5" From 35b43f34ebbea13407154445369a4f8cdb15cf78 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Nov 2022 23:23:26 +0000 Subject: [PATCH 208/409] Bump loader-utils from 1.4.1 to 1.4.2 in /website Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.1 to 1.4.2. - [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.2/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v1.4.1...v1.4.2) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... Signed-off-by: dependabot[bot] --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 177a4a3802..220a489dfa 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4812,9 +4812,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From ece1e8b9137d5a95d412b42b0e2b2fc5b4a9176a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Nov 2022 14:51:10 +0100 Subject: [PATCH 209/409] OP-4394 - Hound --- .../webpublisher/plugins/publish/collect_published_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 181f8b4ab7..79ed499a20 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -253,7 +253,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin): if ext: ext = ext.lower() if ext.startswith("."): - ext = ext[1:] + ext = ext[1:] lower_extensions.add(ext) # all extensions setting From 4b95ad68168b138070171c862e0afaf4c08fb9f0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 16 Nov 2022 15:57:30 +0100 Subject: [PATCH 210/409] OP-4394 - use lowercased extension in ExtractReview There might be 
uppercased extension sent in by accident (.PNG), which would make all checks against set of extension not work. --- openpype/plugins/publish/extract_review.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 1f9b30fba3..982bd9dc24 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -152,7 +152,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if input_ext.startswith("."): input_ext = input_ext[1:] - if input_ext not in self.supported_exts: + if input_ext.lower() not in self.supported_exts: self.log.info( "Representation has unsupported extension \"{}\"".format( input_ext @@ -179,7 +179,7 @@ class ExtractReview(pyblish.api.InstancePlugin): single_frame_image = False if len(input_filepaths) == 1: ext = os.path.splitext(input_filepaths[0])[-1] - single_frame_image = ext in IMAGE_EXTENSIONS + single_frame_image = ext.lower() in IMAGE_EXTENSIONS filtered_defs = [] for output_def in output_defs: @@ -501,7 +501,7 @@ class ExtractReview(pyblish.api.InstancePlugin): first_sequence_frame += handle_start ext = os.path.splitext(repre["files"][0])[1].replace(".", "") - if ext in self.alpha_exts: + if ext.lower() in self.alpha_exts: input_allow_bg = True return { @@ -934,6 +934,8 @@ class ExtractReview(pyblish.api.InstancePlugin): if output_ext.startswith("."): output_ext = output_ext[1:] + output_ext = output_ext.lower() + # Store extension to representation new_repre["ext"] = output_ext From 5c37d91138332442fa1d746003f8b16a7e623f2e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 17:20:44 +0100 Subject: [PATCH 211/409] uncomment subimages because multipart exr is created which actually can't ffmpeg handle --- openpype/lib/transcoding.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 5a57026496..6f571ea522 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -476,7 +476,7 @@ def convert_for_ffmpeg( if input_frame_start is not None and input_frame_end is not None: is_sequence = int(input_frame_end) != int(input_frame_start) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -524,10 +524,8 @@ def convert_for_ffmpeg( input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) "--ch", channels_arg, - # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed with oiio # Use first subimage - # "--subimage", "0" + "--subimage", "0" ]) # Add frame definitions to arguments @@ -621,7 +619,7 @@ def convert_input_paths_for_ffmpeg( " \".exr\" extension. Got \"{}\"." 
            ).format(ext))
 
-    input_info = get_oiio_info_for_input(first_input_path)
+    input_info = get_oiio_info_for_input(first_input_path, logger=logger)
 
     # Change compression only if source compression is "dwaa" or "dwab"
     # - they're not supported in ffmpeg
@@ -639,6 +637,7 @@ def convert_input_paths_for_ffmpeg(
     red, green, blue, alpha = review_channels
     input_channels = [red, green, blue]
+    # TODO find subimage inder where rgba is available for multipart exrs
     channels_arg = "R={},G={},B={}".format(red, green, blue)
     if alpha is not None:
         channels_arg += ",A={}".format(alpha)

@@ -671,11 +670,8 @@ def convert_input_paths_for_ffmpeg(
             # Tell oiiotool which channels should be put to top stack
             # (and output)
             "--ch", channels_arg,
-            # WARNING: This is commented out because ffmpeg won't be able to
-            # render proper output when only one subimage is outputed
-            # with oiiotool
            # Use first subimage
-            # "--subimage", "0"
+            "--subimage", "0"
         ])
 
         for attr_name, attr_value in input_info["attribs"].items():

From c028bb2a9446f5a7891a7a42427a62aa0f3a0886 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com>
Date: Wed, 16 Nov 2022 18:37:24 +0100
Subject: [PATCH 212/409] Update openpype/client/entities.py

Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com>
---
 openpype/client/entities.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openpype/client/entities.py b/openpype/client/entities.py
index bbef8dc65e..38d6369d09 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -389,7 +389,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
         returned if 'None' is passed.
 
     Returns:
-        Union[str, Dict]: None if subset with specified filters was not found.
+        Union[None, Dict[str, Any]]: None if subset with specified filters was not found.
            or dict subset document which can be reduced to
            specified 'fields'.
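Note on the TODO added in the transcoding hunk above: the subimage lookup is left open in these commits. As an illustrative sketch only (not part of the patch series), and assuming OpenImageIO's Python bindings are importable in the environment that runs the conversion, the index of the subimage that actually carries the review channels could be found roughly like this; the helper name find_rgba_subimage is hypothetical:

import OpenImageIO as oiio


def find_rgba_subimage(filepath, wanted_channels=("R", "G", "B", "A")):
    # Hypothetical helper: walk the subimages of a (possibly multipart) EXR
    # and return the index of the first subimage that contains all wanted
    # channel names, or None when nothing matches or the file cannot open.
    image_input = oiio.ImageInput.open(filepath)
    if image_input is None:
        return None

    try:
        subimage_index = 0
        # 'seek_subimage' returns False once the index runs past the last
        # subimage stored in the file.
        while image_input.seek_subimage(subimage_index, 0):
            channel_names = set(image_input.spec().channelnames)
            if set(wanted_channels) <= channel_names:
                return subimage_index
            subimage_index += 1
    finally:
        image_input.close()
    return None

The resolved index could then stand in for the hard-coded "--subimage", "0" pair in the oiiotool arguments whenever the main layer is not the first subimage.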
From 24da47332bbd0951acd621fbb54274153a8a1e02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 16 Nov 2022 18:56:18 +0100 Subject: [PATCH 213/409] :bug: fix representation creation --- .../traypublisher/plugins/publish/collect_online_file.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py index 459ee463aa..82c4870fe4 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -13,12 +13,11 @@ class CollectOnlineFile(pyblish.api.InstancePlugin): def process(self, instance): file = Path(instance.data["creator_attributes"]["path"]) - if not instance.data.get("representations"): - instance.data["representations"] = [ - { + instance.data["representations"].append( + { "name": file.suffix.lstrip("."), "ext": file.suffix.lstrip("."), "files": file.name, "stagingDir": file.parent.as_posix() - } - ] + } + ) From 45c6a9ab93a8c5ae0b830190eaabd559d8c369b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 16 Nov 2022 18:56:36 +0100 Subject: [PATCH 214/409] :recycle: refactor code --- .../plugins/create/create_online.py | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py index 5a6373730d..19f956a50e 100644 --- a/openpype/hosts/traypublisher/plugins/create/create_online.py +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -38,23 +38,31 @@ class OnlineCreator(TrayPublishCreator): return "fa.file" def create(self, subset_name, instance_data, pre_create_data): - if not pre_create_data.get("representation_file")["filenames"]: + repr_file = pre_create_data.get("representation_file") + if not repr_file: raise CreatorError("No files specified") - asset = get_asset_by_name(self.project_name, instance_data["asset"]) - origin_basename = Path(pre_create_data.get( - "representation_file")["filenames"][0]).stem + files = repr_file.get("filenames") + if not files: + # this should never happen + raise CreatorError("Missing files from representation") + origin_basename = Path(files[0]).stem + + asset = get_asset_by_name( + self.project_name, instance_data["asset"], fields=["_id"]) if get_subset_by_name( - self.project_name, origin_basename, asset["_id"]): + self.project_name, origin_basename, asset["_id"], + fields=["_id"]): raise CreatorError(f"subset with {origin_basename} already " "exists in selected asset") instance_data["originalBasename"] = origin_basename subset_name = origin_basename - path = (Path(pre_create_data.get("representation_file")["directory"]) / pre_create_data.get("representation_file")["filenames"][0]).as_posix() # noqa - instance_data["creator_attributes"] = {"path": path} + instance_data["creator_attributes"] = { + "path": (Path(repr_file["directory"]) / files[0]).as_posix() + } # Create new instance new_instance = CreatedInstance(self.family, subset_name, From 3357392e71c5a1b53747d56c3430897f68a8995b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 16 Nov 2022 18:59:21 +0100 Subject: [PATCH 215/409] :rotating_light: fix :dog: --- openpype/client/entities.py | 4 ++-- .../traypublisher/plugins/publish/collect_online_file.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) 
diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 38d6369d09..c415be8816 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -389,8 +389,8 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): returned if 'None' is passed. Returns: - Union[None, Dict[str, Any]]: None if subset with specified filters was not found. - or dict subset document which can be reduced to + Union[None, Dict[str, Any]]: None if subset with specified filters was + not found or dict subset document which can be reduced to specified 'fields'. """ diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py index 82c4870fe4..a3f86afa13 100644 --- a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py +++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py @@ -15,9 +15,9 @@ class CollectOnlineFile(pyblish.api.InstancePlugin): instance.data["representations"].append( { - "name": file.suffix.lstrip("."), - "ext": file.suffix.lstrip("."), - "files": file.name, - "stagingDir": file.parent.as_posix() + "name": file.suffix.lstrip("."), + "ext": file.suffix.lstrip("."), + "files": file.name, + "stagingDir": file.parent.as_posix() } ) From 64e5af230a3509ef16d8c0ee0fc826284960232b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Nov 2022 12:54:50 +0100 Subject: [PATCH 216/409] OP-4394 - removed explicit lower from repre ext to not shadow upper case issue Using lower here would hide possibly broken representation, as we would expect both repre["ext"] and repre["name"] be lowercased. In case the aren't review won't get created >> someone will notice and fix issues on source representation. 
--- openpype/plugins/publish/extract_review.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 982bd9dc24..f299d1c6e9 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -152,7 +152,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if input_ext.startswith("."): input_ext = input_ext[1:] - if input_ext.lower() not in self.supported_exts: + if input_ext not in self.supported_exts: self.log.info( "Representation has unsupported extension \"{}\"".format( input_ext From 69ddc20e3c4003db2285d2095f45c0e585cae001 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 14:34:42 +0100 Subject: [PATCH 217/409] include secrets module to python_2 vendor --- .../vendor/python/python_2/secrets/LICENSE | 21 +++ .../python/python_2/secrets/__init__.py | 16 +++ .../vendor/python/python_2/secrets/secrets.py | 132 ++++++++++++++++++ 3 files changed, 169 insertions(+) create mode 100644 openpype/vendor/python/python_2/secrets/LICENSE create mode 100644 openpype/vendor/python/python_2/secrets/__init__.py create mode 100644 openpype/vendor/python/python_2/secrets/secrets.py diff --git a/openpype/vendor/python/python_2/secrets/LICENSE b/openpype/vendor/python/python_2/secrets/LICENSE new file mode 100644 index 0000000000..d3211e4d9f --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Scaleway + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/openpype/vendor/python/python_2/secrets/__init__.py b/openpype/vendor/python/python_2/secrets/__init__.py new file mode 100644 index 0000000000..c29ee61be1 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + + +__version__ = "1.0.6" + +# Emulates __all__ for Python2 +from .secrets import ( + choice, + randbelow, + randbits, + SystemRandom, + token_bytes, + token_hex, + token_urlsafe, + compare_digest +) diff --git a/openpype/vendor/python/python_2/secrets/secrets.py b/openpype/vendor/python/python_2/secrets/secrets.py new file mode 100644 index 0000000000..967d2862d9 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/secrets.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +"""Generate cryptographically strong pseudo-random numbers suitable for + +managing secrets such as account authentication, tokens, and similar. + + +See PEP 506 for more information. 
+ +https://www.python.org/dev/peps/pep-0506/ + + +""" + + +__all__ = ['choice', 'randbelow', 'randbits', 'SystemRandom', + + 'token_bytes', 'token_hex', 'token_urlsafe', + + 'compare_digest', + + ] + +import os +import sys +from random import SystemRandom + +import base64 + +import binascii + + +# hmac.compare_digest did appear in python 2.7.7 +if sys.version_info >= (2, 7, 7): + from hmac import compare_digest +else: + # If we use an older python version, we will define an equivalent method + def compare_digest(a, b): + """Compatibility compare_digest method for python < 2.7. + This method is NOT cryptographically secure and may be subject to + timing attacks, see https://docs.python.org/2/library/hmac.html + """ + return a == b + + +_sysrand = SystemRandom() + + +randbits = _sysrand.getrandbits + +choice = _sysrand.choice + + +def randbelow(exclusive_upper_bound): + + """Return a random int in the range [0, n).""" + + if exclusive_upper_bound <= 0: + + raise ValueError("Upper bound must be positive.") + + return _sysrand._randbelow(exclusive_upper_bound) + + +DEFAULT_ENTROPY = 32 # number of bytes to return by default + + +def token_bytes(nbytes=None): + + """Return a random byte string containing *nbytes* bytes. + + + If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_bytes(16) #doctest:+SKIP + + b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + + + """ + + if nbytes is None: + + nbytes = DEFAULT_ENTROPY + + return os.urandom(nbytes) + + +def token_hex(nbytes=None): + + """Return a random text string, in hexadecimal. + + + The string has *nbytes* random bytes, each byte converted to two + + hex digits. If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_hex(16) #doctest:+SKIP + + 'f9bf78b9a18ce6d46a0cd2b0b86df9da' + + + """ + + return binascii.hexlify(token_bytes(nbytes)).decode('ascii') + + +def token_urlsafe(nbytes=None): + + """Return a random URL-safe text string, in Base64 encoding. + + + The string has *nbytes* random bytes. If *nbytes* is ``None`` + + or not supplied, a reasonable default is used. + + + >>> token_urlsafe(16) #doctest:+SKIP + + 'Drmhze6EPcv0fN_81Bj-nA' + + + """ + + tok = token_bytes(nbytes) + + return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii') From 64a1e55170153504fcb4ff892a8030bc14ef034f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Nov 2022 15:59:58 +0100 Subject: [PATCH 218/409] enhance speed of collect audio by converting it to context plugin --- openpype/plugins/publish/collect_audio.py | 175 +++++++++++++++------- 1 file changed, 124 insertions(+), 51 deletions(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 7d53b24e54..db567f8b8f 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -1,21 +1,27 @@ +import collections import pyblish.api from openpype.client import ( - get_last_version_by_subset_name, + get_assets, + get_subsets, + get_last_versions, get_representations, ) -from openpype.pipeline import ( - legacy_io, - get_representation_path, -) +from openpype.pipeline import get_representation_path_with_anatomy -class CollectAudio(pyblish.api.InstancePlugin): +class CollectAudio(pyblish.api.ContextPlugin): """Collect asset's last published audio. 
The audio subset name searched for is defined in: project settings > Collect Audio + + Note: + The plugin was instance plugin but because of so much queries the + plugin was slowing down whole collection phase a lot thus was + converted to context plugin which requires only 4 queries top. """ + label = "Collect Asset Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] @@ -39,67 +45,134 @@ class CollectAudio(pyblish.api.InstancePlugin): audio_subset_name = "audioMain" - def process(self, instance): - if instance.data.get("audio"): - self.log.info( - "Skipping Audio collecion. It is already collected" - ) + def process(self, context): + # Fake filtering by family inside context plugin + filtered_instances = [] + for instance in pyblish.api.instances_by_plugin( + context, self.__class__ + ): + # Skip instances that already have audio filled + if instance.data.get("audio"): + self.log.info( + "Skipping Audio collecion. It is already collected" + ) + continue + filtered_instances.append(instance) + + # Skip if none of instances remained + if not filtered_instances: return # Add audio to instance if exists. + instances_by_asset_name = collections.defaultdict(list) + for instance in filtered_instances: + asset_name = instance.data["asset"] + instances_by_asset_name[asset_name].append(instance) + + asset_names = set(instances_by_asset_name.keys()) self.log.info(( - "Searching for audio subset '{subset}'" - " in asset '{asset}'" + "Searching for audio subset '{subset}' in assets {assets}" ).format( subset=self.audio_subset_name, - asset=instance.data["asset"] + assets=", ".join([ + '"{}"'.format(asset_name) + for asset_name in asset_names + ]) )) - repre_doc = self._get_repre_doc(instance) + # Query all required documents + project_name = context.data["projectName"] + anatomy = context.data["anatomy"] + repre_docs_by_asset_names = self.query_representations( + project_name, asset_names) - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.info("Audio Data added to instance ...") + for asset_name, instances in instances_by_asset_name.items(): + repre_docs = repre_docs_by_asset_names[asset_name] + if not repre_docs: + continue - def _get_repre_doc(self, instance): - cache = instance.context.data.get("__cache_asset_audio") - if cache is None: - cache = {} - instance.context.data["__cache_asset_audio"] = cache - asset_name = instance.data["asset"] + repre_doc = repre_docs[0] + repre_path = get_representation_path_with_anatomy( + repre_doc, anatomy + ) + for instance in instances: + instance.data["audio"] = [{ + "offset": 0, + "filename": repre_path + }] + self.log.info("Audio Data added to instance ...") - # first try to get it from cache - if asset_name in cache: - return cache[asset_name] + def query_representations(self, project_name, asset_names): + """Query representations related to audio subsets for passed assets. - project_name = legacy_io.active_project() + Args: + project_name (str): Project in which we're looking for all + entities. + asset_names (Iterable[str]): Asset names where to look for audio + subsets and their representations. - # Find latest versions document - last_version_doc = get_last_version_by_subset_name( + Returns: + collections.defaultdict[str, List[Dict[Str, Any]]]: Representations + related to audio subsets by asset name. 
+ """ + + output = collections.defaultdict(list) + # Query asset documents + asset_docs = get_assets( project_name, - self.audio_subset_name, - asset_name=asset_name, + asset_names=asset_names, fields=["_id"] ) - repre_doc = None - if last_version_doc: - # Try to find it's representation (Expected there is only one) - repre_docs = list(get_representations( - project_name, version_ids=[last_version_doc["_id"]] - )) - if not repre_docs: - self.log.warning( - "Version document does not contain any representations" - ) - else: - repre_doc = repre_docs[0] + asset_id_by_name = {} + for asset_doc in asset_docs: + asset_id_by_name[asset_doc["name"]] = asset_doc["_id"] + asset_ids = set(asset_id_by_name.values()) - # update cache - cache[asset_name] = repre_doc + # Query subsets with name define by 'audio_subset_name' attr + # - one or none subsets with the name should be available on an asset + subset_docs = get_subsets( + project_name, + subset_names=[self.audio_subset_name], + asset_ids=asset_ids, + fields=["_id", "parent"] + ) + subset_id_by_asset_id = {} + for subset_doc in subset_docs: + asset_id = subset_doc["parent"] + subset_id_by_asset_id[asset_id] = subset_doc["_id"] - return repre_doc + subset_ids = set(subset_id_by_asset_id.values()) + if not subset_ids: + return output + + # Find all latest versions for the subsets + version_docs_by_subset_id = get_last_versions( + project_name, subset_ids=subset_ids, fields=["_id", "parent"] + ) + version_id_by_subset_id = { + subset_id: version_doc["_id"] + for subset_id, version_doc in version_docs_by_subset_id.items() + } + version_ids = set(version_id_by_subset_id.values()) + if not version_ids: + return output + + # Find representations under latest versions of audio subsets + repre_docs = get_representations( + project_name, version_ids=version_ids + ) + repre_docs_by_version_id = collections.defaultdict(list) + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + repre_docs_by_version_id[version_id].append(repre_doc) + + if not repre_docs_by_version_id: + return output + + for asset_name in asset_names: + asset_id = asset_id_by_name.get(asset_name) + subset_id = subset_id_by_asset_id.get(asset_id) + version_id = version_id_by_subset_id.get(subset_id) + output[asset_name] = repre_docs_by_version_id[version_id] + return output From 2db4cc43aae80fa8fb203ba775fff6fbe19a23c0 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 18 Nov 2022 17:52:57 +0100 Subject: [PATCH 219/409] Fix - typo --- openpype/hooks/pre_copy_last_published_workfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 44144e5fff..26b43c39cb 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -38,7 +38,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook): sync_server = self.modules_manager.get("sync_server") if not sync_server or not sync_server.enabled: - self.log.deubg("Sync server module is not enabled or available") + self.log.debug("Sync server module is not enabled or available") return # Check there is no workfile available From d076de0d077197bf3afc64edf9ab08837f2db549 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Nov 2022 18:47:37 +0100 Subject: [PATCH 220/409] add more information about where ftrack service is storing versions or where is looking for versions --- openpype/modules/ftrack/scripts/sub_event_status.py | 11 +++++++++++ 1 file changed, 11 
insertions(+) diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index 6c7ecb8351..eb3f63c04b 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -7,6 +7,8 @@ import signal import socket import datetime +import appdirs + import ftrack_api from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( @@ -253,6 +255,15 @@ class StatusFactory: ) }) + items.append({ + "type": "label", + "value": ( + "Local versions dir: {}
Version repository path: {}" + ).format( + appdirs.user_data_dir("openpype", "pypeclub"), + os.environ.get("OPENPYPE_PATH") + ) + }) items.append({"type": "label", "value": "---"}) return items From 996cf3dcf95cd5042b2433780406ec5e74f1ae30 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 20:52:55 +0100 Subject: [PATCH 221/409] Nuke: load image first frame --- openpype/hosts/nuke/plugins/load/load_image.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 3e81ef999b..3c5d4a7fc1 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -62,7 +62,9 @@ class LoadImage(load.LoaderPlugin): def load(self, context, name, namespace, options): self.log.info("__ options: `{}`".format(options)) - frame_number = options.get("frame_number", 1) + frame_number = options.get( + "frame_number", int(nuke.root()["first_frame"].getValue()) + ) version = context['version'] version_data = version.get("data", {}) From 8d1e720a889ebabc985505f0165ec11c4d6f7342 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 21:19:23 +0100 Subject: [PATCH 222/409] Nuke: reset tab to first native tab --- openpype/hosts/nuke/api/pipeline.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c343c635fa..fb707ca44c 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -364,6 +364,9 @@ def containerise(node, set_avalon_knob_data(node, data) + # set tab to first native + node.setTab(0) + return node From c06f6891e8b021ab6e67f70a080202161059d8e8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 18 Nov 2022 21:19:46 +0100 Subject: [PATCH 223/409] nuke: close property panel after node creation --- openpype/hosts/nuke/plugins/load/load_camera_abc.py | 3 +++ openpype/hosts/nuke/plugins/load/load_clip.py | 3 +++ openpype/hosts/nuke/plugins/load/load_effects.py | 3 +++ openpype/hosts/nuke/plugins/load/load_effects_ip.py | 3 +++ openpype/hosts/nuke/plugins/load/load_image.py | 4 ++++ openpype/hosts/nuke/plugins/load/load_model.py | 4 ++++ openpype/hosts/nuke/plugins/load/load_script_precomp.py | 3 +++ 7 files changed, 23 insertions(+) diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index f5dfc8c0ab..9fef7424c8 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -65,6 +65,9 @@ class AlembicCameraLoader(load.LoaderPlugin): object_name, file), inpanel=False ) + # hide property panel + camera_node.hideControlPanel() + camera_node.forceValidate() camera_node["frame_rate"].setValue(float(fps)) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index b17356c5c7..565d777811 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -145,6 +145,9 @@ class LoadClip(plugin.NukeLoader): "Read", "name {}".format(read_name)) + # hide property panel + read_node.hideControlPanel() + # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index d164e0604c..cef4b0a5fc 100644 --- 
a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -89,6 +89,9 @@ class LoadEffects(load.LoaderPlugin): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 44565c139d..9bd40be816 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -90,6 +90,9 @@ class LoadEffectsInputProcess(load.LoaderPlugin): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 3e81ef999b..f7ce20eee9 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -112,6 +112,10 @@ class LoadImage(load.LoaderPlugin): r = nuke.createNode( "Read", "name {}".format(read_name)) + + # hide property panel + r.hideControlPanel() + r["file"].setValue(file) # Set colorspace defined in version data diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 151401bad3..ad985e83c6 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -63,6 +63,10 @@ class AlembicModelLoader(load.LoaderPlugin): object_name, file), inpanel=False ) + + # hide property panel + model_node.hideControlPanel() + model_node.forceValidate() # Ensure all items are imported and selected. 
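A minimal usage sketch of the pattern these loader changes apply, assuming a running Nuke session; the node class and name below are only illustrative and not taken from any of the loaders:

import nuke

# create the node without opening its properties bin, then keep the
# floating control panel hidden so loading does not clutter the UI
read_node = nuke.createNode("Read", "name exampleRead", inpanel=False)
read_node.hideControlPanel()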
diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 21e384b538..f0972f85d2 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -71,6 +71,9 @@ class LinkAsGroup(load.LoaderPlugin): "Precomp", "file {}".format(file)) + # hide property panel + P.hideControlPanel() + # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) self.log.info("colorspace: {}\n".format(colorspace)) From 554b3b256c4bfb368bb808376088e3315df54127 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 19 Nov 2022 03:37:23 +0000 Subject: [PATCH 224/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 268f33083a..0116b49f4d 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.5" +__version__ = "3.14.7-nightly.6" From 33974c39d4aac0bb28ebd87e007acc166b8cd003 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 19 Nov 2022 17:13:50 +0800 Subject: [PATCH 225/409] aov Filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 58fcd2d281..6fde0df162 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1110,7 +1110,7 @@ class RenderProductsRedshift(ARenderProducts): if light_groups_enabled: return products - beauty_name = "Beauty_other" if has_beauty_aov else "" + beauty_name = "BeautyAux" if has_beauty_aov else "" for camera in cameras: products.insert(0, RenderProduct(productName=beauty_name, From 94939e431a18cc45472276f01f96c71e5187dfc8 Mon Sep 17 00:00:00 2001 From: clement hector Date: Mon, 21 Nov 2022 16:18:26 +0100 Subject: [PATCH 226/409] rename families_to_upload to families_to_review + define it as class attribute --- openpype/hosts/tvpaint/plugins/publish/extract_sequence.py | 3 ++- openpype/settings/defaults/project_settings/tvpaint.json | 5 ++--- .../schemas/projects_schema/schema_project_tvpaint.json | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index d8aef1ab6b..7d2e9c6f25 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -19,6 +19,7 @@ class ExtractSequence(pyblish.api.Extractor): label = "Extract Sequence" hosts = ["tvpaint"] families = ["review", "renderPass", "renderLayer", "renderScene"] + families_to_review = ["review"] # Modifiable with settings review_bg = [255, 255, 255, 255] @@ -129,7 +130,7 @@ class ExtractSequence(pyblish.api.Extractor): # Fill tags and new families from project settings tags = [] - if family_lowered in self.families_to_upload: + if family_lowered in self.families_to_review: tags.append("review") # Sequence of one frame diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 2e413f50cd..9ccc318d70 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ 
b/openpype/settings/defaults/project_settings/tvpaint.json @@ -12,11 +12,10 @@ 255, 255 ], - "families_to_upload": [ + "families_to_review": [ "review", "renderpass", - "renderlayer", - "renderscene" + "renderlayer" ] }, "ValidateProjectSettings": { diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 0392c9089b..61342ef738 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -59,8 +59,8 @@ }, { "type": "enum", - "key": "families_to_upload", - "label": "Families to upload", + "key": "families_to_review", + "label": "Families to review", "multiselection": true, "enum_items": [ {"review": "review"}, From cdb91c03795db7bc9b249e69dd605769562c11bc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 17:56:02 +0100 Subject: [PATCH 227/409] Added helper class for version resolving and sorting --- .../custom/plugins/GlobalJobPreLoad.py | 131 ++++++++++++++++++ 1 file changed, 131 insertions(+) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 9b35c9502d..6c3dd092fe 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -14,6 +14,137 @@ from Deadline.Scripting import ( ProcessUtils, ) +VERSION_REGEX = re.compile( + r"(?P<major>0|[1-9]\d*)" + r"\.(?P<minor>0|[1-9]\d*)" + r"\.(?P<patch>0|[1-9]\d*)" + r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?" + r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?" +) + + +class OpenPypeVersion: + """Fake semver version class for OpenPype version purposes. + + The version + """ + def __init__(self, major, minor, patch, prerelease, origin=None): + self.major = major + self.minor = minor + self.patch = patch + self.prerelease = prerelease + + is_valid = True + if not major or not minor or not patch: + is_valid = False + self.is_valid = is_valid + + if origin is None: + base = "{}.{}.{}".format(str(major), str(minor), str(patch)) + if not prerelease: + origin = base + else: + origin = "{}-{}".format(base, str(prerelease)) + + self.origin = origin + + @classmethod + def from_string(cls, version): + """Create an object of version from string. + + Args: + version (str): Version as a string. + + Returns: + Union[OpenPypeVersion, None]: Version object if input is nonempty + string otherwise None. + """ + + if not version: + return None + valid_parts = VERSION_REGEX.findall(version) + if len(valid_parts) != 1: + # Return invalid version with filled 'origin' attribute + return cls(None, None, None, None, origin=str(version)) + + # Unpack found version + major, minor, patch, pre, post = valid_parts[0] + prerelease = pre + # Post release is not important anymore and should be considered as + # part of prerelease + # - comparison is implemented to find suitable build and builds should + # never contain prerelease part so "not proper" parsing is + # acceptable for this use case. + if post: + prerelease = "{}+{}".format(pre, post) + + return cls( + int(major), int(minor), int(patch), prerelease, origin=version + ) + + def has_compatible_release(self, other): + """Version has compatible release as other version. + + Both major and minor versions must be exactly the same. In that case + a build can be considered as release compatible with any version.
+ + Args: + other (OpenPypeVersion): Other version. + + Returns: + bool: Version is release compatible with other version. + """ + + if self.is_valid and other.is_valid: + return self.major == other.major and self.minor == other.minor + return False + + def __bool__(self): + return self.is_valid + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.origin) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return self.origin == other + return self.origin == other.origin + + def __lt__(self, other): + if not isinstance(other, self.__class__): + return None + + if not self.is_valid: + return True + + if not other.is_valid: + return False + + if self.origin == other.origin: + return None + + same_major = self.major == other.major + if not same_major: + return self.major < other.major + + same_minor = self.minor == other.minor + if not same_minor: + return self.minor < other.minor + + same_patch = self.patch == other.patch + if not same_patch: + return self.patch < other.patch + + if not self.prerelease: + return False + + if not other.prerelease: + return True + + pres = [self.prerelease, other.prerelease] + pres.sort() + return pres[0] == self.prerelease + def get_openpype_version_from_path(path, build=True): """Get OpenPype version from provided path. From b1e899d8ee2a79cd673bdf14bf4adf2134443dca Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 17:57:10 +0100 Subject: [PATCH 228/409] Use full version for resolving and use specific build if matches requested version --- .../custom/plugins/GlobalJobPreLoad.py | 197 ++++++++++-------- 1 file changed, 110 insertions(+), 87 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 6c3dd092fe..375cf48b8f 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -152,9 +152,9 @@ def get_openpype_version_from_path(path, build=True): build (bool, optional): Get only builds, not sources Returns: - str or None: version of OpenPype if found. - + Union[OpenPypeVersion, None]: version of OpenPype if found. 
""" + # fix path for application bundle on macos if platform.system().lower() == "darwin": path = os.path.join(path, "Contents", "MacOS", "lib", "Python") @@ -177,8 +177,10 @@ def get_openpype_version_from_path(path, build=True): with open(version_file, "r") as vf: exec(vf.read(), version) - version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) - return version_match[1] + version_str = version.get("__version__") + if version_str: + return OpenPypeVersion.from_string(version_str) + return None def get_openpype_executable(): @@ -190,6 +192,91 @@ def get_openpype_executable(): return exe_list, dir_list +def get_openpype_versions(exe_list, dir_list): + print(">>> Getting OpenPype executable ...") + openpype_versions = [] + + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if install_dir: + print("--- Looking for OpenPype at: {}".format(install_dir)) + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = get_openpype_version_from_path(subdir) + if not version: + continue + print(" - found: {} - {}".format(version, subdir)) + openpype_versions.append((version, subdir)) + return openpype_versions + + +def get_requested_openpype_executable( + exe, dir_list, requested_version +): + requested_version_obj = OpenPypeVersion.from_string(requested_version) + if not requested_version_obj: + print(( + ">>> Requested version does not match version regex \"{}\"" + ).format(VERSION_REGEX)) + return None + + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) + openpype_versions = get_openpype_versions(dir_list) + if not openpype_versions: + return None + + # if looking for requested compatible version, + # add the implicitly specified to the list too. + if exe: + exe_dir = os.path.dirname(exe) + print("Looking for OpenPype at: {}".format(exe_dir)) + version = get_openpype_version_from_path(exe_dir) + if version: + print(" - found: {} - {}".format(version, exe_dir)) + openpype_versions.append((version, exe_dir)) + + matching_item = None + compatible_versions = [] + for version_item in openpype_versions: + version, version_dir = version_item + if requested_version_obj.has_compatible_release(version): + compatible_versions.append(version_item) + if version == requested_version_obj: + # Store version item if version match exactly + # - break if is found matching version + matching_item = version_item + break + + if not compatible_versions: + return None + + compatible_versions.sort(key=lambda item: item[0]) + if matching_item: + version, version_dir = matching_item + print(( + "*** Found exact match build version {} in {}" + ).format(version_dir, version)) + + else: + version, version_dir = compatible_versions[-1] + + print(( + "*** Latest compatible version found is {} in {}" + ).format(version_dir, version)) + + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join(version_dir, "openpype_console.exe"), + os.path.join(version_dir, "openpype_console") + ] + return FileUtils.SearchFileList(";".join(exe_list)) + + def inject_openpype_environment(deadlinePlugin): """ Pull env vars from OpenPype and push them to rendering process. 
@@ -199,93 +286,29 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: - print(">>> Getting OpenPype executable ...") exe_list, dir_list = get_openpype_executable() - openpype_versions = [] - # if the job requires specific OpenPype version, - # lets go over all available and find compatible build. + exe = FileUtils.SearchFileList(exe_list) + requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - print(( - ">>> Scanning for compatible requested version {}" - ).format(requested_version)) - install_dir = DirectoryUtils.SearchDirectoryList(dir_list) - if install_dir: - print("--- Looking for OpenPype at: {}".format(install_dir)) - sub_dirs = [ - f.path for f in os.scandir(install_dir) - if f.is_dir() - ] - for subdir in sub_dirs: - version = get_openpype_version_from_path(subdir) - if not version: - continue - print(" - found: {} - {}".format(version, subdir)) - openpype_versions.append((version, subdir)) + exe = get_requested_openpype_executable( + exe, dir_list, requested_version + ) + if exe is None: + raise RuntimeError(( + "Cannot find compatible version available for version {}" + " requested by the job. Please add it through plugin" + " configuration in Deadline or install it to configured" + " directory." + ).format(requested_version)) - exe = FileUtils.SearchFileList(exe_list) - if openpype_versions: - # if looking for requested compatible version, - # add the implicitly specified to the list too. - print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) - version = get_openpype_version_from_path( - os.path.dirname(exe)) - if version: - print(" - found: {} - {}".format( - version, os.path.dirname(exe) - )) - openpype_versions.append((version, os.path.dirname(exe))) - - if requested_version: - # sort detected versions - if openpype_versions: - # use natural sorting - openpype_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest available version found is {}" - ).format(openpype_versions[-1][0])) - requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 - compatible_versions = [] - for version in openpype_versions: - v = version[0].split(".")[:3] - if v[0] == requested_major and v[1] == requested_minor: - compatible_versions.append(version) - if not compatible_versions: - raise RuntimeError( - ("Cannot find compatible version available " - "for version {} requested by the job. " - "Please add it through plugin configuration " - "in Deadline or install it to configured " - "directory.").format(requested_version)) - # sort compatible versions nad pick the last one - compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest compatible version found is {}" - ).format(compatible_versions[-1][0])) - # create list of executables for different platform and let - # Deadline decide. - exe_list = [ - os.path.join( - compatible_versions[-1][1], "openpype_console.exe"), - os.path.join( - compatible_versions[-1][1], "openpype_console") - ] - exe = FileUtils.SearchFileList(";".join(exe_list)) - if exe == "": - raise RuntimeError( - "OpenPype executable was not found " + - "in the semicolon separated list " + - "\"" + ";".join(exe_list) + "\". 
" + - "The path to the render executable can be configured " + - "from the Plugin Configuration in the Deadline Monitor.") + if not exe: + raise RuntimeError(( + "OpenPype executable was not found in the semicolon " + "separated list \"{}\"." + "The path to the render executable can be configured" + " from the Plugin Configuration in the Deadline Monitor." + ).format(";".join(exe_list))) print("--- OpenPype executable: {}".format(exe)) From dbc72502b4cbf9859493d43ce90141f84ecc9420 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 17:57:37 +0100 Subject: [PATCH 229/409] few formatting changes --- .../custom/plugins/GlobalJobPreLoad.py | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 375cf48b8f..78e1371eee 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -326,22 +326,22 @@ def inject_openpype_environment(deadlinePlugin): export_url ] - add_args = {} - add_args['project'] = \ - job.GetJobEnvironmentKeyValue('AVALON_PROJECT') - add_args['asset'] = job.GetJobEnvironmentKeyValue('AVALON_ASSET') - add_args['task'] = job.GetJobEnvironmentKeyValue('AVALON_TASK') - add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME') - add_args["envgroup"] = "farm" + add_kwargs = { + "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"), + "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"), + "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"), + "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"), + "envgroup": "farm" + } + if all(add_kwargs.values()): + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) - if all(add_args.values()): - for key, value in add_args.items(): - args.append("--{}".format(key)) - args.append(value) else: - msg = "Required env vars: AVALON_PROJECT, AVALON_ASSET, " + \ - "AVALON_TASK, AVALON_APP_NAME" - raise RuntimeError(msg) + raise RuntimeError(( + "Missing required env vars: AVALON_PROJECT, AVALON_ASSET," + " AVALON_TASK, AVALON_APP_NAME" + )) if not os.environ.get("OPENPYPE_MONGO"): print(">>> Missing OPENPYPE_MONGO env var, process won't work") @@ -362,12 +362,12 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Loading file ...") with open(export_url) as fp: contents = json.load(fp) - for key, value in contents.items(): - deadlinePlugin.SetProcessEnvironmentVariable(key, value) + + for key, value in contents.items(): + deadlinePlugin.SetProcessEnvironmentVariable(key, value) script_url = job.GetJobPluginInfoKeyValue("ScriptFilename") if script_url: - script_url = script_url.format(**contents).replace("\\", "/") print(">>> Setting script path {}".format(script_url)) job.SetJobPluginInfoKeyValue("ScriptFilename", script_url) From 61e5dc3fc9c326a90601e774722ae30b419ef390 Mon Sep 17 00:00:00 2001 From: Thomas Fricard <51854004+friquette@users.noreply.github.com> Date: Mon, 21 Nov 2022 18:21:04 +0100 Subject: [PATCH 230/409] change order of default value Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- openpype/settings/defaults/project_settings/tvpaint.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 9ccc318d70..e03ce32030 
100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -14,8 +14,8 @@ ], "families_to_review": [ "review", - "renderpass", - "renderlayer" + "renderlayer", + "renderscene" ] }, "ValidateProjectSettings": { From e24c2f853b5b976e1c441470cf6e7f435e2c0815 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:15:34 +0100 Subject: [PATCH 231/409] attribute definitions can be hidden and disabled --- openpype/lib/attribute_definitions.py | 32 +++++++++++++++++++++------ 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 6baeaec045..ed151bbe4e 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -105,11 +105,14 @@ class AbtractAttrDef(object): How to force to set `key` attribute? Args: - key(str): Under which key will be attribute value stored. - label(str): Attribute label. - tooltip(str): Attribute tooltip. - is_label_horizontal(bool): UI specific argument. Specify if label is + key (str): Under which key will be attribute value stored. + default (Any): Default value of an attribute. + label (str): Attribute label. + tooltip (str): Attribute tooltip. + is_label_horizontal (bool): UI specific argument. Specify if label is next to value input or ahead. + hidden (bool): Will be item hidden (for UI purposes). + disabled (bool): Item will be visible but disabled (for UI purposes). """ type_attributes = [] @@ -117,16 +120,29 @@ class AbtractAttrDef(object): is_value_def = True def __init__( - self, key, default, label=None, tooltip=None, is_label_horizontal=None + self, + key, + default, + label=None, + tooltip=None, + is_label_horizontal=None, + hidden=False, + disabled=False ): if is_label_horizontal is None: is_label_horizontal = True + + if hidden is None: + hidden = False + self.key = key self.label = label self.tooltip = tooltip self.default = default self.is_label_horizontal = is_label_horizontal - self._id = uuid.uuid4() + self.hidden = hidden + self.disabled = disabled + self._id = uuid.uuid4().hex self.__init__class__ = AbtractAttrDef @@ -173,7 +189,9 @@ class AbtractAttrDef(object): "label": self.label, "tooltip": self.tooltip, "default": self.default, - "is_label_horizontal": self.is_label_horizontal + "is_label_horizontal": self.is_label_horizontal, + "hidden": self.hidden, + "disabled": self.disabled } for attr in self.type_attributes: data[attr] = getattr(self, attr) From 6abfa14e01d67eae20e7bb66c219feab99d70a37 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:16:02 +0100 Subject: [PATCH 232/409] added special definition for hidden values --- openpype/lib/attribute_definitions.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index ed151bbe4e..0df7b16e64 100644 --- a/openpype/lib/attribute_definitions.py +++ b/openpype/lib/attribute_definitions.py @@ -253,6 +253,26 @@ class UnknownDef(AbtractAttrDef): return value +class HiddenDef(AbtractAttrDef): + """Hidden value of Any type. + + This attribute can be used for UI purposes to pass values related + to other attributes (e.g. in multi-page UIs). + + Keep in mind the value should be possible to parse by json parser. 
+ """ + + type = "hidden" + + def __init__(self, key, default=None, **kwargs): + kwargs["default"] = default + kwargs["hidden"] = True + super(UnknownDef, self).__init__(key, **kwargs) + + def convert_value(self, value): + return value + + class NumberDef(AbtractAttrDef): """Number definition. From fe392aa5db267ef09e0152d867eb02e45fee065e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:16:41 +0100 Subject: [PATCH 233/409] implemented hidden widget --- openpype/tools/attribute_defs/widgets.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index dc697b08a6..7f7c20009e 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -6,6 +6,7 @@ from Qt import QtWidgets, QtCore from openpype.lib.attribute_definitions import ( AbtractAttrDef, UnknownDef, + HiddenDef, NumberDef, TextDef, EnumDef, @@ -459,6 +460,29 @@ class UnknownAttrWidget(_BaseAttrDefWidget): self._input_widget.setText(str_value) +class HiddenAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + self.setVisible(False) + self._value = None + self._multivalue = False + + def setVisible(self, visible): + if visible: + visible = False + super(HiddenAttrWidget, self).setVisible(visible) + + def current_value(self): + if self._multivalue: + raise ValueError( + "{} can't output for multivalue.".format(self.__class__.__name__) + ) + return self._value + + def set_value(self, value, multivalue=False): + self._value = copy.deepcopy(value) + self._multivalue = multivalue + + class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( From 068ec3f89809eca1fcff32d81e36158f88dc248a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:17:09 +0100 Subject: [PATCH 234/409] enhanced attribute definitons widget --- openpype/tools/attribute_defs/widgets.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index 7f7c20009e..6db6da58e1 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -23,6 +23,16 @@ from .files_widget import FilesWidget def create_widget_for_attr_def(attr_def, parent=None): + widget = _create_widget_for_attr_def(attr_def, parent) + if attr_def.hidden: + widget.setVisible(False) + + if attr_def.disabled: + widget.setEnabled(False) + return widget + + +def _create_widget_for_attr_def(attr_def, parent=None): if not isinstance(attr_def, AbtractAttrDef): raise TypeError("Unexpected type \"{}\" expected \"{}\"".format( str(type(attr_def)), AbtractAttrDef @@ -43,6 +53,9 @@ def create_widget_for_attr_def(attr_def, parent=None): if isinstance(attr_def, UnknownDef): return UnknownAttrWidget(attr_def, parent) + if isinstance(attr_def, HiddenDef): + return HiddenAttrWidget(attr_def, parent) + if isinstance(attr_def, FileDef): return FileAttrWidget(attr_def, parent) @@ -116,6 +129,10 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): self._current_keys.add(attr_def.key) widget = create_widget_for_attr_def(attr_def, self) + self._widgets.append(widget) + + if attr_def.hidden: + continue expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: @@ -134,7 +151,6 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget): layout.addWidget( widget, row, col_num, 1, expand_cols ) - self._widgets.append(widget) row += 1 def set_value(self, 
value): From a606de5b76b63a6051731f292daa7b0420bfbbde Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:17:56 +0100 Subject: [PATCH 235/409] don't add hidden widgets to publisher widgets --- openpype/tools/publisher/widgets/widgets.py | 26 +++++++++++++++------ 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ce3d91ce63..a0d97245ba 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -9,6 +9,7 @@ import collections from Qt import QtWidgets, QtCore, QtGui import qtawesome +from openpype.lib.attribute_definitions import UnknownDef from openpype.tools.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -1303,6 +1304,13 @@ class CreatorAttrsWidget(QtWidgets.QWidget): else: widget.set_value(values, True) + widget.value_changed.connect(self._input_value_changed) + self._attr_def_id_to_instances[attr_def.id] = attr_instances + self._attr_def_id_to_attr_def[attr_def.id] = attr_def + + if attr_def.hidden: + continue + expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: expand_cols = 1 @@ -1321,13 +1329,8 @@ class CreatorAttrsWidget(QtWidgets.QWidget): content_layout.addWidget( widget, row, col_num, 1, expand_cols ) - row += 1 - widget.value_changed.connect(self._input_value_changed) - self._attr_def_id_to_instances[attr_def.id] = attr_instances - self._attr_def_id_to_attr_def[attr_def.id] = attr_def - self._scroll_area.setWidget(content_widget) self._content_widget = content_widget @@ -1421,8 +1424,17 @@ class PublishPluginAttrsWidget(QtWidgets.QWidget): widget = create_widget_for_attr_def( attr_def, content_widget ) - label = attr_def.label or attr_def.key - content_layout.addRow(label, widget) + hidden_widget = attr_def.hidden + # Hide unknown values of publish plugins + # - The keys in most of cases does not represent what would + # label represent + if isinstance(attr_def, UnknownDef): + widget.setVisible(False) + hidden_widget = True + + if not hidden_widget: + label = attr_def.label or attr_def.key + content_layout.addRow(label, widget) widget.value_changed.connect(self._input_value_changed) From 29cc9bdce61ea3ce1dc01d17bb05c3a2db3afffe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 21 Nov 2022 19:39:10 +0100 Subject: [PATCH 236/409] Fix line length --- openpype/tools/attribute_defs/widgets.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index 6db6da58e1..1ffb3d3799 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -489,9 +489,9 @@ class HiddenAttrWidget(_BaseAttrDefWidget): def current_value(self): if self._multivalue: - raise ValueError( - "{} can't output for multivalue.".format(self.__class__.__name__) - ) + raise ValueError("{} can't output for multivalue.".format( + self.__class__.__name__ + )) return self._value def set_value(self, value, multivalue=False): From 7bf1d0bc9b2efb05e9904a4784ded5ea2da5b717 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 22 Nov 2022 18:56:20 +0800 Subject: [PATCH 237/409] aov filtering --- openpype/hosts/maya/api/lib_renderproducts.py | 1 - .../modules/deadline/plugins/publish/submit_publish_job.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git 
a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py index 6fde0df162..c54e3ab3e0 100644 --- a/openpype/hosts/maya/api/lib_renderproducts.py +++ b/openpype/hosts/maya/api/lib_renderproducts.py @@ -1054,7 +1054,6 @@ class RenderProductsRedshift(ARenderProducts): # Any AOVs that still get processed, like Cryptomatte # by themselves are not multipart files. - # aov_multipart = not multipart # Redshift skips rendering of masterlayer without AOV suffix # when a Beauty AOV is rendered. It overrides the main layer. diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index c1e9dd4015..6362b4ca65 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -500,7 +500,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if instance_data.get("multipartExr"): preview = True - self.log.info("preview:{}".format(preview)) + self.log.debug("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name @@ -543,7 +543,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): if new_instance.get("extendFrames", False): self._copy_extend_frames(new_instance, rep) instances.append(new_instance) - self.log.info("instances:{}".format(instances)) + self.log.debug("instances:{}".format(instances)) return instances def _get_representations(self, instance, exp_files): From 996bd4897b80ae72a06a8bbc81c1b69d471485ac Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 12:04:47 +0100 Subject: [PATCH 238/409] tabs widget can set current tab by index --- openpype/tools/publisher/widgets/tabs_widget.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index 84638a002c..eb3eda8c19 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -68,7 +68,16 @@ class PublisherTabsWidget(QtWidgets.QFrame): self.set_current_tab(identifier) return button + def get_tab_by_index(self, index): + if index < 0 or index > self._btns_layout.count(): + return None + item = self._btns_layout.itemAt(index) + return item.widget() + def set_current_tab(self, identifier): + if isinstance(identifier, int): + identifier = self.get_tab_by_index(identifier) + if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier From 430f30c05e3dc53184277ed121efd0fdcd003b3a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 12:05:22 +0100 Subject: [PATCH 239/409] added helper methods to know on which tab we are --- openpype/tools/publisher/window.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index f107c0e505..3879e37ad7 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -539,6 +539,18 @@ class PublisherWindow(QtWidgets.QDialog): def _go_to_report_tab(self): self._set_current_tab("report") + def _is_on_create_tab(self): + self._is_current_tab("create") + + def _is_on_publish_tab(self): + self._is_current_tab("publish") + + def _is_on_details_tab(self): + self._is_current_tab("details") + + def _is_on_report_tab(self): + self._is_current_tab("report") + def _set_publish_overlay_visibility(self, 
visible): if visible: widget = self._publish_overlay From ac9b9b208e055c856c32313a87d20bf1dbf403c3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 12:54:28 +0100 Subject: [PATCH 240/409] OP-4196 - safer getter for published_path published_path might be missing in case of thumbnail not getting published. This implementation takes from staging if published_path not present --- .../slack/plugins/publish/integrate_slack_api.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 643e55915b..f40a13db9f 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -142,13 +142,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): def _get_thumbnail_path(self, instance): """Returns abs url for thumbnail if present in instance repres""" - published_path = None + thumbnail_path = None for repre in instance.data.get("representations", []): if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []): - if os.path.exists(repre["published_path"]): - published_path = repre["published_path"] + self.log.info(repre) + repre_thumbnail_path = ( + repre.get("published_path") or + os.path.join(repre["stagingDir"], repre["files"]) + ) + if os.path.exists(repre_thumbnail_path): + self.log.info("exists") + thumbnail_path = repre_thumbnail_path break - return published_path + return thumbnail_path def _get_review_path(self, instance): """Returns abs url for review if present in instance repres""" From c61098b782492728f7dbbe667b2540b2805b35ba Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 13:00:34 +0100 Subject: [PATCH 241/409] OP-4196 - fix when task_data is not dict In legacy cases task might be only string with its name, not structure with additional metadata (type etc.). This implementation handles that. 
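A hypothetical representation dict showing why the fallback from PATCH 240 above is needed: a thumbnail that skipped integration has no "published_path", so the staging location is used instead (keys follow the plugin code, values are invented):

import os

repre = {
    "thumbnail": True,
    "stagingDir": "/tmp/publish_staging",
    "files": "thumbnail.jpg",
}
# same lookup order as the plugin: published path first, staging file second
thumbnail_path = (
    repre.get("published_path")
    or os.path.join(repre["stagingDir"], repre["files"])
)
# -> "/tmp/publish_staging/thumbnail.jpg"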
--- .../modules/slack/plugins/publish/integrate_slack_api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index f40a13db9f..6138671180 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -121,10 +121,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): ): fill_pairs.append(("task", task_data["name"])) - else: + elif isinstance(task_data, dict): for key, value in task_data.items(): fill_key = "task[{}]".format(key) fill_pairs.append((fill_key, value)) + else: + # fallback for legacy - if task_data is only task name + fill_pairs.append(("task", task_data)) self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) From f993842c4ec7a4e91b5a42cbd61ddba0f9387a35 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 13:01:27 +0100 Subject: [PATCH 242/409] OP-4196 - remove unnecessary logging --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 6138671180..e43b07b228 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -148,13 +148,11 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): thumbnail_path = None for repre in instance.data.get("representations", []): if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []): - self.log.info(repre) repre_thumbnail_path = ( repre.get("published_path") or os.path.join(repre["stagingDir"], repre["files"]) ) if os.path.exists(repre_thumbnail_path): - self.log.info("exists") thumbnail_path = repre_thumbnail_path break return thumbnail_path From ab17acddc7c192dab58727e87fe87b51e242a3df Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 13:39:24 +0100 Subject: [PATCH 243/409] OP-4196 - better handling of data It should take task from instance anatomyData, then from context and handle non dict items. 
--- .../slack/plugins/publish/integrate_slack_api.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index e43b07b228..2c6f3d21bd 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -112,7 +112,13 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): if review_path: fill_pairs.append(("review_filepath", review_path)) - task_data = fill_data.get("task") + task_data = ( + copy.deepcopy(instance.data.get("anatomyData", [])).get("task") + or fill_data.get("task") + ) + if not isinstance(task_data, dict): + # fallback for legacy - if task_data is only task name + task_data["name"] = task_data if task_data: if ( "{task}" in message_templ @@ -121,13 +127,10 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): ): fill_pairs.append(("task", task_data["name"])) - elif isinstance(task_data, dict): + else: for key, value in task_data.items(): fill_key = "task[{}]".format(key) fill_pairs.append((fill_key, value)) - else: - # fallback for legacy - if task_data is only task name - fill_pairs.append(("task", task_data)) self.log.debug("fill_pairs ::{}".format(fill_pairs)) multiple_case_variants = prepare_template_data(fill_pairs) From 3cd241d2dbfa56a43ae2199fb1c38bd236497cd1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 14:01:10 +0100 Subject: [PATCH 244/409] OP-4196 - fix wrong return type --- openpype/modules/slack/plugins/publish/integrate_slack_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 2c6f3d21bd..9539d03306 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -113,7 +113,7 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): fill_pairs.append(("review_filepath", review_path)) task_data = ( - copy.deepcopy(instance.data.get("anatomyData", [])).get("task") + copy.deepcopy(instance.data.get("anatomyData", {})).get("task") or fill_data.get("task") ) if not isinstance(task_data, dict): From 8a121bc0ff43e86bbe42d660a29e1d1fed13e08c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:11:58 +0100 Subject: [PATCH 245/409] move default settings from 'project_settings/global/tools/publish/template_name_profiles' to legacy place --- .../defaults/project_settings/global.json | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index b8995de99e..46b8b1b0c8 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -288,6 +288,17 @@ "task_types": [], "tasks": [], "template_name": "maya2unreal" + }, + { + "families": [ + "online" + ], + "hosts": [ + "traypublisher" + ], + "task_types": [], + "tasks": [], + "template_name": "online" } ] }, @@ -484,19 +495,7 @@ ] }, "publish": { - "template_name_profiles": [ - { - "families": [ - "online" - ], - "hosts": [ - "traypublisher" - ], - "task_types": [], - "task_names": [], - "template_name": "online" - } - ], + "template_name_profiles": [], "hero_template_name_profiles": [] } }, From 
788ed6478006c17644460945c9a60cc8207a036c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:50:46 +0100 Subject: [PATCH 246/409] fix typo --- openpype/tools/publisher/widgets/card_view_widgets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 9fd2bf0824..72644c09db 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -674,9 +674,9 @@ class InstanceCardView(AbstractInstanceView): instances_by_group[group_name] ) - self._update_ordered_group_nameS() + self._update_ordered_group_names() - def _update_ordered_group_nameS(self): + def _update_ordered_group_names(self): ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): if idx > 0: From 3ba5f8e0e99798c62ec295ea2a3706f3da8aac37 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:51:05 +0100 Subject: [PATCH 247/409] fix tas combobox sizes --- openpype/tools/publisher/widgets/widgets.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index ce3d91ce63..332e231653 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -578,6 +578,11 @@ class TasksCombobox(QtWidgets.QComboBox): self._text = None + # Make sure combobox is extended horizontally + size_policy = self.sizePolicy() + size_policy.setHorizontalPolicy(size_policy.MinimumExpanding) + self.setSizePolicy(size_policy) + def set_invalid_empty_task(self, invalid=True): self._proxy_model.set_filter_empty(invalid) if invalid: From b2065acd7a43724ecd522a9f14531f3a45df38ce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:51:44 +0100 Subject: [PATCH 248/409] added ability to know if views have any items --- .../publisher/widgets/card_view_widgets.py | 7 +++++++ .../publisher/widgets/list_view_widgets.py | 7 +++++++ .../tools/publisher/widgets/overview_widget.py | 4 ++++ openpype/tools/publisher/widgets/widgets.py | 17 ++++++++++++++++- 4 files changed, 34 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 72644c09db..09635d1a15 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -676,6 +676,13 @@ class InstanceCardView(AbstractInstanceView): self._update_ordered_group_names() + def has_items(self): + if self._convertor_items_group is not None: + return True + if self._widgets_by_group: + return True + return False + def _update_ordered_group_names(self): ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 32d84862f0..1cdb4cdcdb 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -912,6 +912,13 @@ class InstanceListView(AbstractInstanceView): if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) + def has_items(self): + if self._convertor_group_widget is not None: + return True + if self._group_items: + return True + return False + def get_selected_items(self): """Get selected instance ids and context 
selection. diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index 1c924d1631..b1aeda9cd4 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -205,6 +205,10 @@ class OverviewWidget(QtWidgets.QFrame): self._subset_views_widget.height() ) + def has_items(self): + view = self._subset_views_layout.currentWidget() + return view.has_items() + def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 332e231653..d6c6f8673c 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -305,6 +305,20 @@ class AbstractInstanceView(QtWidgets.QWidget): "{} Method 'refresh' is not implemented." ).format(self.__class__.__name__)) + def has_items(self): + """View has at least one item. + + This is more a question for controller but is called from widget + which should probably should not use controller. + + Returns: + bool: There is at least one instance or conversion item. + """ + + raise NotImplementedError(( + "{} Method 'has_items' is not implemented." + ).format(self.__class__.__name__)) + def get_selected_items(self): """Selected instances required for callbacks. @@ -1185,7 +1199,7 @@ class GlobalAttrsWidget(QtWidgets.QWidget): """Set currently selected instances. Args: - instances(list): List of selected instances. + instances(List[CreatedInstance]): List of selected instances. Empty instances tells that nothing or context is selected. """ self._set_btns_visible(False) @@ -1619,6 +1633,7 @@ class SubsetAttributesWidget(QtWidgets.QWidget): instances(List[CreatedInstance]): List of currently selected instances. context_selected(bool): Is context selected. + convertor_identifiers(List[str]): Identifiers of convert items. 
""" all_valid = True From d87e8fe99c68c23a5fdf1ce19fc0debe654eee97 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:52:06 +0100 Subject: [PATCH 249/409] tabs widget can accept tab indexes --- openpype/tools/publisher/widgets/tabs_widget.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index eb3eda8c19..d8ad19cfc0 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -54,6 +54,9 @@ class PublisherTabsWidget(QtWidgets.QFrame): self._buttons_by_identifier = {} def is_current_tab(self, identifier): + if isinstance(identifier, int): + identifier = self.get_tab_by_index(identifier) + if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier return self._current_identifier == identifier @@ -69,10 +72,10 @@ class PublisherTabsWidget(QtWidgets.QFrame): return button def get_tab_by_index(self, index): - if index < 0 or index > self._btns_layout.count(): - return None - item = self._btns_layout.itemAt(index) - return item.widget() + if 0 >= index < self._btns_layout.count(): + item = self._btns_layout.itemAt(index) + return item.widget() + return None def set_current_tab(self, identifier): if isinstance(identifier, int): From dd50c6723e1ec892478205f72de2e0bf57940d35 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:55:39 +0100 Subject: [PATCH 250/409] small teaks and fixes --- openpype/tools/publisher/window.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 3879e37ad7..59dd2e6ec9 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -432,7 +432,7 @@ class PublisherWindow(QtWidgets.QDialog): self._update_create_overlay_size() self._update_create_overlay_visibility() - if self._is_current_tab("create"): + if self._is_on_create_tab(): self._install_app_event_listener() # Reset if requested @@ -450,7 +450,7 @@ class PublisherWindow(QtWidgets.QDialog): self._context_label.setText(label) def _update_publish_details_widget(self, force=False): - if not force and not self._is_current_tab("details"): + if not force and not self._is_on_details_tab(): return report_data = self.controller.get_publish_report() @@ -540,16 +540,16 @@ class PublisherWindow(QtWidgets.QDialog): self._set_current_tab("report") def _is_on_create_tab(self): - self._is_current_tab("create") + return self._is_current_tab("create") def _is_on_publish_tab(self): - self._is_current_tab("publish") + return self._is_current_tab("publish") def _is_on_details_tab(self): - self._is_current_tab("details") + return self._is_current_tab("details") def _is_on_report_tab(self): - self._is_current_tab("report") + return self._is_current_tab("report") def _set_publish_overlay_visibility(self, visible): if visible: @@ -601,11 +601,8 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() - if ( - not self._is_current_tab("create") - and not self._is_current_tab("publish") ): - self._set_current_tab("publish") + self._go_to_publish_tab() def _on_publish_start(self): self._create_tab.setEnabled(False) @@ -621,8 +618,8 @@ class PublisherWindow(QtWidgets.QDialog): self._publish_details_widget.close_details_popup() - if self._is_current_tab(self._create_tab): - 
self._set_current_tab("publish") + if self._is_on_create_tab(): + self._go_to_publish_tab() def _on_publish_validated_change(self, event): if event["value"]: @@ -635,7 +632,7 @@ class PublisherWindow(QtWidgets.QDialog): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed - if self._is_current_tab("publish"): + if self._is_on_publish_tab(): self._go_to_report_tab() if validate_enabled: From caf94fb68f789d528e69cf6b423b29b20fe16369 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 15:57:17 +0100 Subject: [PATCH 251/409] show publisher can accept tab to switch to --- openpype/tools/publisher/window.py | 62 +++++++++++++++++++++++++++++- openpype/tools/utils/host_tools.py | 16 ++++---- 2 files changed, 67 insertions(+), 11 deletions(-) diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 59dd2e6ec9..0f7fd2c7e3 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -156,7 +156,7 @@ class PublisherWindow(QtWidgets.QDialog): footer_layout.addWidget(footer_bottom_widget, 0) # Content - # - wrap stacked widget under one more widget to be able propagate + # - wrap stacked widget under one more widget to be able to propagate # margins (QStackedLayout can't have margins) content_widget = QtWidgets.QWidget(under_publish_widget) @@ -267,6 +267,9 @@ class PublisherWindow(QtWidgets.QDialog): controller.event_system.add_callback( "publish.reset.finished", self._on_publish_reset ) + controller.event_system.add_callback( + "controller.reset.finished", self._on_controller_reset + ) controller.event_system.add_callback( "publish.process.started", self._on_publish_start ) @@ -337,11 +340,13 @@ class PublisherWindow(QtWidgets.QDialog): self._controller = controller self._first_show = True + self._first_reset = True # This is a little bit confusing but 'reset_on_first_show' is too long - # forin init + # for init self._reset_on_first_show = reset_on_show self._reset_on_show = True self._publish_frame_visible = None + self._tab_on_reset = None self._error_messages_to_show = collections.deque() self._errors_dialog_message_timer = errors_dialog_message_timer @@ -353,12 +358,21 @@ class PublisherWindow(QtWidgets.QDialog): self._show_timer = show_timer self._show_counter = 0 + self._window_is_visible = False @property def controller(self): return self._controller + def make_sure_is_visible(self): + if self._window_is_visible: + self.setWindowState(QtCore.Qt.ActiveWindow) + + else: + self.show() + def showEvent(self, event): + self._window_is_visible = True super(PublisherWindow, self).showEvent(event) if self._first_show: self._first_show = False @@ -372,6 +386,7 @@ class PublisherWindow(QtWidgets.QDialog): self._update_create_overlay_size() def closeEvent(self, event): + self._window_is_visible = False self._uninstall_app_event_listener() self.save_changes() self._reset_on_show = True @@ -449,6 +464,19 @@ class PublisherWindow(QtWidgets.QDialog): def set_context_label(self, label): self._context_label.setText(label) + def set_tab_on_reset(self, tab): + """Define tab that will be selected on window show. + + This is single use method, when publisher window is showed the value is + unset and not used on next show. + + Args: + tab (Union[int, Literal[create, publish, details, report]]: Index + or name of tab which will be selected on show (after reset). 
+ """ + + self._tab_on_reset = tab + def _update_publish_details_widget(self, force=False): if not force and not self._is_on_details_tab(): return @@ -524,6 +552,11 @@ class PublisherWindow(QtWidgets.QDialog): def _set_current_tab(self, identifier): self._tabs_widget.set_current_tab(identifier) + def set_current_tab(self, tab): + self._set_current_tab(tab) + if not self._window_is_visible: + self.set_tab_on_reset(tab) + def _is_current_tab(self, identifier): return self._tabs_widget.is_current_tab(identifier) @@ -601,7 +634,32 @@ class PublisherWindow(QtWidgets.QDialog): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() + + def _on_controller_reset(self): + self._first_reset, first_reset = False, self._first_reset + if self._tab_on_reset is not None: + self._tab_on_reset, new_tab = None, self._tab_on_reset + self._set_current_tab(new_tab) + return + + # On first reset change tab based on available items + # - if there is at least one instance the tab is changed to 'publish' + # otherwise 'create' is used + # - this happens only on first show + if first_reset: + if self._overview_widget.has_items(): + self._go_to_publish_tab() + else: + self._go_to_create_tab() + + elif ( + not self._is_on_create_tab() + and not self._is_on_publish_tab() ): + # If current tab is not 'Create' or 'Publish' go to 'Publish' + # - this can happen when publishing started and was reset + # at that moment it doesn't make sense to stay at publish + # specific tabs. self._go_to_publish_tab() def _on_publish_start(self): diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 046dcbdf6a..e8593a8ae2 100644 --- a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -285,14 +285,12 @@ class HostToolsHelper: return self._publisher_tool - def show_publisher_tool(self, parent=None, controller=None): + def show_publisher_tool(self, parent=None, controller=None, tab=None): with qt_app_context(): - dialog = self.get_publisher_tool(parent, controller) - - dialog.show() - dialog.raise_() - dialog.activateWindow() - dialog.showNormal() + window = self.get_publisher_tool(parent, controller) + if tab: + window.set_current_tab(tab) + window.make_sure_is_visible() def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -446,8 +444,8 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) -def show_publisher(parent=None): - _SingletonPoint.show_tool_by_name("publisher", parent) +def show_publisher(parent=None, **kwargs): + _SingletonPoint.show_tool_by_name("publisher", parent, **kwargs) def show_experimental_tools_dialog(parent=None): From 6af4412591b45f2001a9f01e998a36e871666ec9 Mon Sep 17 00:00:00 2001 From: clement hector Date: Tue, 22 Nov 2022 16:08:03 +0100 Subject: [PATCH 252/409] set creator window as parent of pop up window --- .../hosts/photoshop/plugins/create/create_legacy_image.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 2792a775e0..7672458165 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -29,7 +29,8 @@ class CreateImage(create.LegacyCreator): if len(selection) > 1: # Ask user whether to create one image or image per selected # item. 
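(The `host_tools` change above is the piece callers are expected to use; a rough usage sketch follows. Only `show_publisher` and its new `tab` keyword come from these patches - the calling context is assumed, and valid tab names per the docstring above are "create", "publish", "details" and "report".)

    from openpype.tools.utils import host_tools

    # Open the publisher UI directly on the report page. If the window is
    # not visible yet, the requested tab is stored and applied after reset.
    host_tools.show_publisher(tab="report")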
- msg_box = QtWidgets.QMessageBox() + active_window = QtWidgets.QApplication.activeWindow() + msg_box = QtWidgets.QMessageBox(parent=active_window) msg_box.setIcon(QtWidgets.QMessageBox.Warning) msg_box.setText( "Multiple layers selected." @@ -102,7 +103,7 @@ class CreateImage(create.LegacyCreator): if group.long_name: for directory in group.long_name[::-1]: name = directory.replace(stub.PUBLISH_ICON, '').\ - replace(stub.LOADED_ICON, '') + replace(stub.LOADED_ICON, '') long_names.append(name) self.data.update({"subset": subset_name}) From 861cdadc9bbcd171da0d8793de6595db8446efce Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 22 Nov 2022 16:48:02 +0100 Subject: [PATCH 253/409] fix formatting --- openpype/tools/publisher/widgets/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index d6c6f8673c..6bc09c55a3 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -316,7 +316,7 @@ class AbstractInstanceView(QtWidgets.QWidget): """ raise NotImplementedError(( - "{} Method 'has_items' is not implemented." + "{} Method 'has_items' is not implemented." ).format(self.__class__.__name__)) def get_selected_items(self): From 3b81c7f5731dfc5c018bff11e9758fc3e5e26450 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 18:15:27 +0100 Subject: [PATCH 254/409] OP-4196 - better logging of file upload errors --- .../slack/plugins/publish/integrate_slack_api.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py index 9539d03306..0cd5ec9de8 100644 --- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py +++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py @@ -188,10 +188,17 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin): channel=channel, title=os.path.basename(p_file) ) - attachment_str += "\n<{}|{}>".format( - response["file"]["permalink"], - os.path.basename(p_file)) - file_ids.append(response["file"]["id"]) + if response.get("error"): + error_str = self._enrich_error( + str(response.get("error")), + channel) + self.log.warning( + "Error happened: {}".format(error_str)) + else: + attachment_str += "\n<{}|{}>".format( + response["file"]["permalink"], + os.path.basename(p_file)) + file_ids.append(response["file"]["id"]) if publish_files: message += attachment_str From 855e7d1c61c16093706b276435aed02fbb108e91 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 22 Nov 2022 18:28:01 +0100 Subject: [PATCH 255/409] OP-4196 - fix filtering profiles Task types didn't work. 
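(For context on what this fix touches, a small sketch of the profile matching follows. The key names and the `filter_profiles` call mirror the plugin code in the diff below; the profile contents and instance values are made up for illustration, and the import path is an assumption - inside the plugin the function is already in scope.)

    from openpype.lib import filter_profiles  # import path assumed

    # Hypothetical Slack notification profile as it would come from settings.
    profiles = [{
        "families": ["render"],
        "tasks": [],
        "task_types": ["Compositing"],
        "hosts": ["nuke"],
        "subsets": [],
    }]

    # Keys built from the published instance. With this fix the task name and
    # task type come from the instance's "anatomyData" instead of the session,
    # so "task_types" can actually match.
    key_values = {
        "families": "render",
        "tasks": "comp_main",
        "task_types": "Compositing",
        "hosts": "nuke",
        "subsets": "renderCompMain",
    }

    profile = filter_profiles(profiles, key_values)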
--- .../modules/slack/plugins/publish/collect_slack_family.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py index 39b05937dc..27e899d59a 100644 --- a/openpype/modules/slack/plugins/publish/collect_slack_family.py +++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py @@ -18,15 +18,15 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin): profiles = None def process(self, instance): - task_name = legacy_io.Session.get("AVALON_TASK") + task_data = instance.data["anatomyData"].get("task", {}) family = self.main_family_from_instance(instance) key_values = { "families": family, - "tasks": task_name, + "tasks": task_data.get("name"), + "task_types": task_data.get("type"), "hosts": instance.data["anatomyData"]["app"], "subsets": instance.data["subset"] } - profile = filter_profiles(self.profiles, key_values, logger=self.log) From 110cd58fd168861b91cbdee366324edbe21c8917 Mon Sep 17 00:00:00 2001 From: Joseff Date: Tue, 22 Nov 2022 18:40:42 +0100 Subject: [PATCH 256/409] Update the UOpenPypePublishInstance to use UDataAsset --- .../Private/OpenPypePublishInstance.cpp | 184 ++++++++++------- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 54 ++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- .../Private/OpenPypePublishInstance.cpp | 185 +++++++++++------- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 61 +++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- 8 files changed, 330 insertions(+), 174 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..72dc617699 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,147 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split("/" + GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); 
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + UObject* Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + AssetDataInternal.Emplace(Asset); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } +} + +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) +{ + if (Cast(InAssetData.GetAsset()) == nullptr) + { + if (AssetDataInternal.Contains(NULL)) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + AssetDataInternal.Remove(NULL); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(NULL); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; - if 
(assetDir.StartsWith(*selfDir)) + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" + ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) -{ - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) - { - // exclude self - if (assetFName != "AssetContainer") - { - - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); - } - } -} +#endif diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h 
b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..54e24e03d7 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -5,17 +5,51 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); - + + UPROPERTY(VisibleAnywhere,BlueprintReadOnly) + TSet AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + */ UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) + TSet AssetDataExternal; + + /** + * Function for returning all the assets in the container. + * + * @return Returns all the internal and externally added assets into one set (TSet). + */ + UFUNCTION(BlueprintCallable, Category = Python) + TSet GetAllAssets() const + { + return AssetDataInternal.Union(AssetDataExternal); + }; + + private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const UObject* InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..9a89c3868b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 
+2,148 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "AssetToolsModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split(GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + const TObjectPtr Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! 
Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + AssetDataInternal.Emplace(Asset); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } +} + +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) +{ + if (Cast(InAssetData.GetAsset()) == nullptr) + { + if (AssetDataInternal.Contains(nullptr)) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; - if (assetDir.StartsWith(*selfDir)) + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" 
+ ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + + // Check for duplicated assets + for (const TObjectPtr& Asset : AssetDataInternal) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const TObjectPtr& Asset : AssetDataExternal) + { + if (Cast(Asset) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) -{ - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) - { - // exclude self - if (assetFName != "AssetContainer") - { - - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); - } - } -} +#endif diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..97df757acd 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ 
b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,21 +1,62 @@ #pragma once +#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); - + + UPROPERTY(VisibleAnywhere,BlueprintReadOnly) + TSet> AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + */ UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; + + /** + * Function for returning all the assets in the container. + * + * @return Returns all the internal and externally added assets into one set (TSet). + */ + UFUNCTION(BlueprintCallable, Category = Python) + TSet GetAllAssets() const + { + TSet> Unionized = AssetDataInternal.Union(AssetDataExternal); + + TSet ResultSet; + + for (auto& Asset : Unionized) + ResultSet.Add(Asset.Get()); + + return ResultSet; + } + private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const TObjectPtr& InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; From c3e5b7a169c670b35889a5fb0038ba4b50bf7841 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Nov 2022 21:21:42 +0100 Subject: [PATCH 257/409] update history.md --- HISTORY.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/HISTORY.md b/HISTORY.md index f6cc74e114..7365696f96 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,40 @@ # Changelog +## 
[3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) + +### 📖 Documentation + +- Documentation: Minor updates to dev\_requirements.md [\#4025](https://github.com/pypeclub/OpenPype/pull/4025) + +**🆕 New features** + +- Nuke: add 13.2 variant [\#4041](https://github.com/pypeclub/OpenPype/pull/4041) + +**🚀 Enhancements** + +- Publish Report Viewer: Store reports locally on machine [\#4040](https://github.com/pypeclub/OpenPype/pull/4040) +- General: More specific error in burnins script [\#4026](https://github.com/pypeclub/OpenPype/pull/4026) +- General: Extract review does not crash with old settings overrides [\#4023](https://github.com/pypeclub/OpenPype/pull/4023) +- Publisher: Convertors for legacy instances [\#4020](https://github.com/pypeclub/OpenPype/pull/4020) +- workflows: adding milestone creator and assigner [\#4018](https://github.com/pypeclub/OpenPype/pull/4018) +- Publisher: Catch creator errors [\#4015](https://github.com/pypeclub/OpenPype/pull/4015) + +**🐛 Bug fixes** + +- Hiero - effect collection fixes [\#4038](https://github.com/pypeclub/OpenPype/pull/4038) +- Nuke - loader clip correct hash conversion in path [\#4037](https://github.com/pypeclub/OpenPype/pull/4037) +- Maya: Soft fail when applying capture preset [\#4034](https://github.com/pypeclub/OpenPype/pull/4034) +- Igniter: handle missing directory [\#4032](https://github.com/pypeclub/OpenPype/pull/4032) +- StandalonePublisher: Fix thumbnail publishing [\#4029](https://github.com/pypeclub/OpenPype/pull/4029) +- Experimental Tools: Fix publisher import [\#4027](https://github.com/pypeclub/OpenPype/pull/4027) +- Houdini: fix wrong path in ASS loader [\#4016](https://github.com/pypeclub/OpenPype/pull/4016) + +**🔀 Refactored code** + +- General: Import lib functions from lib [\#4017](https://github.com/pypeclub/OpenPype/pull/4017) + ## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5) From e600cd1b3d2963a2a2e26dce79e07818bb4c5d28 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 22 Nov 2022 21:24:11 +0100 Subject: [PATCH 258/409] updating to 3.14.7 --- CHANGELOG.md | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 707b61676f..c3cccf2d1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,63 @@ # Changelog -## [3.14.6](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame 
output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + + +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) ### 📖 Documentation From c63f468484b32628c6d87a35df993bf2303ecb83 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 23 Nov 2022 03:35:08 +0000 Subject: [PATCH 259/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 0116b49f4d..a4af8b7a99 100644 --- a/openpype/version.py +++ 
b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.6" +__version__ = "3.14.7-nightly.7" From 56d5bf89212ef8f9c65bc782a11600ea62291e8a Mon Sep 17 00:00:00 2001 From: Joseff Date: Wed, 23 Nov 2022 10:58:11 +0100 Subject: [PATCH 260/409] Changed the member variables to use TSoftObjectPtr<> TSets now use TSoftObjectPtr<> for referencing the assets, which can reduce memory usage. --- .../Private/OpenPypePublishInstance.cpp | 25 +++--- .../OpenPype/Public/OpenPypePublishInstance.h | 84 +++++++++++++++---- .../Private/OpenPypePublishInstance.cpp | 15 ++-- .../OpenPype/Public/OpenPypePublishInstance.h | 79 ++++++++++++----- 4 files changed, 146 insertions(+), 57 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 72dc617699..ed81104c05 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -15,6 +15,9 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< FAssetRegistryModule>("AssetRegistry"); + const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked( + "PropertyEditor"); + FString Left, Right; GetPathName().Split("/" + GetName(), &Left, &Right); @@ -33,7 +36,6 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); - } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -53,9 +55,11 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (result) { - AssetDataInternal.Emplace(Asset); - UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + if (AssetDataInternal.Emplace(Asset).IsValidId()) + { + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } } } @@ -63,14 +67,14 @@ void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { if (Cast(InAssetData.GetAsset()) == nullptr) { - if (AssetDataInternal.Contains(NULL)) + if (AssetDataInternal.Contains(nullptr)) { - AssetDataInternal.Remove(NULL); + AssetDataInternal.Remove(nullptr); REMOVE_INVALID_ENTRIES(AssetDataInternal) } else { - AssetDataExternal.Remove(NULL); + AssetDataExternal.Remove(nullptr); REMOVE_INVALID_ENTRIES(AssetDataExternal) } } @@ -121,22 +125,21 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( UOpenPypePublishInstance, AssetDataExternal)) { - // Check for duplicated assets for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { AssetDataExternal.Remove(Asset); - return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in 
AssetDataInternal!"); + return SendNotification( + "You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); } - } // Check if no UOpenPypePublishInstance type assets are included for (const auto& Asset : AssetDataExternal) { - if (Cast(Asset) != nullptr) + if (Cast(Asset.Get()) != nullptr) { AssetDataExternal.Remove(Asset); return SendNotification("You are not allowed to add publish instances!"); diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 54e24e03d7..0e946fb039 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -11,32 +11,80 @@ class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset public: - UPROPERTY(VisibleAnywhere,BlueprintReadOnly) - TSet AssetDataInternal; + /** + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + +private: + + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; /** * This property allows exposing the array to include other assets from any other directory than what it's currently * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! 
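(The Blueprint-callable getters declared above are what scripted tooling is expected to consume. A hedged sketch of that from Unreal's Python API follows; `get_all_assets` assumes the usual snake_case exposure of the `GetAllAssets` UFUNCTION, and the asset path is hypothetical - neither is added by this patch.)

    import unreal

    # Load a publish instance data asset and list everything it tracks.
    # External assets are only part of the result when bAddExternalAssets
    # is enabled on the instance.
    instance = unreal.EditorAssetLibrary.load_asset(
        "/Game/OpenPype/PublishInstance")  # hypothetical path
    for asset in instance.get_all_assets():
        unreal.log(asset.get_path_name())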
*/ - UPROPERTY(EditAnywhere, BlueprintReadOnly) + UPROPERTY(EditAnywhere, Category = "Assets") bool bAddExternalAssets = false; - UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) - TSet AssetDataExternal; + UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets") + TSet> AssetDataExternal; - /** - * Function for returning all the assets in the container. - * - * @return Returns all the internal and externally added assets into one set (TSet). - */ - UFUNCTION(BlueprintCallable, Category = Python) - TSet GetAllAssets() const - { - return AssetDataInternal.Union(AssetDataExternal); - }; - - -private: void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 9a89c3868b..c2c7e249c3 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -35,6 +35,7 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -54,9 +55,11 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (result) { - AssetDataInternal.Emplace(Asset); - UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + if (AssetDataInternal.Emplace(Asset).IsValidId()) + { + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } } } @@ -124,7 +127,7 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro { // Check for duplicated assets - for (const TObjectPtr& Asset : AssetDataInternal) + for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { @@ -135,9 +138,9 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro } // Check if no UOpenPypePublishInstance type assets are included - for (const TObjectPtr& Asset : AssetDataExternal) + for (const auto& Asset : AssetDataExternal) { - if (Cast(Asset) != nullptr) + if (Cast(Asset.Get()) != nullptr) { AssetDataExternal.Remove(Asset); return SendNotification("You are not allowed to add publish instances!"); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 97df757acd..2f066bd94b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -9,41 +9,78 @@ UCLASS(Blueprintable) class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { GENERATED_UCLASS_BODY() - public: - - UPROPERTY(VisibleAnywhere,BlueprintReadOnly) - TSet> AssetDataInternal; - /** - * This property allows exposing the array to include other assets from any other directory than 
what it's currently - * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 */ - UPROPERTY(EditAnywhere, BlueprintReadOnly) - bool bAddExternalAssets = false; + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; - UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) - TSet> AssetDataExternal; + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } /** - * Function for returning all the assets in the container. + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. * - * @return Returns all the internal and externally added assets into one set (TSet). + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! */ - UFUNCTION(BlueprintCallable, Category = Python) + UFUNCTION(BlueprintCallable, BlueprintPure) TSet GetAllAssets() const { - TSet> Unionized = AssetDataInternal.Union(AssetDataExternal); - - TSet ResultSet; + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; - for (auto& Asset : Unionized) - ResultSet.Add(Asset.Get()); + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); return ResultSet; } private: + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows the instance to include other assets from any other directory than what it's currently + * monitoring. + * @attention assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category="Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); @@ -52,11 +89,9 @@ private: bool IsUnderSameDir(const TObjectPtr& InAsset) const; #ifdef WITH_EDITOR - + void SendNotification(const FString& Text) const; virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; #endif - }; - From 0d88af8aec4c6112be2629865da7ffce4a7cce4d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 11:40:16 +0100 Subject: [PATCH 261/409] update latest 3.14.7 --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c3cccf2d1e..0c5f2cf8b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,9 @@ **🚀 Enhancements** +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116) - Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) - General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) - Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) @@ -25,6 +28,7 @@ **🐛 Bug fixes** - General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) - Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) - Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) - Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) @@ -54,7 +58,6 @@ - remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) - Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) - ## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) From 6725c1f6d8dc025f13bffbbb1c92a242c49b618f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 11:40:57 +0100 Subject: [PATCH 262/409] udpate history --- HISTORY.md | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/HISTORY.md b/HISTORY.md index 7365696f96..04a1073c07 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,64 @@ # Changelog + +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116) +- Ftrack: Event server status give 
more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name 
[\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + ## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) From 2594bc2a0efa19331b7dbccb2624be41acf1032a Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 23 Nov 2022 10:45:17 +0000 Subject: [PATCH 263/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index a4af8b7a99..a00c7de704 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.7" +__version__ = "3.14.7-nightly.8" From 8b1b09b33825dc9ff320b6c6e49597dedcf58f7f Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 23 Nov 2022 10:58:00 +0000 Subject: [PATCH 264/409] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index a00c7de704..ffabcf8025 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.8" +__version__ = "3.14.7" From 5779687a2b4467195a20bd9242d2fa782f7b27cd Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Wed, 23 Nov 2022 11:59:53 +0100 Subject: [PATCH 265/409] Removed unused argument --- .../deadline/repository/custom/plugins/GlobalJobPreLoad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 78e1371eee..40193bac71 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -192,7 +192,7 @@ def get_openpype_executable(): return exe_list, dir_list -def get_openpype_versions(exe_list, dir_list): +def get_openpype_versions(dir_list): print(">>> Getting OpenPype executable ...") openpype_versions = [] From ec0f6986158b2eab312ade16430c0eb969aa0e68 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 14:52:31 +0100 Subject: [PATCH 266/409] celaction: fixing host --- openpype/hosts/celaction/__init__.py | 10 +++ openpype/hosts/celaction/addon.py | 24 ++++++ .../hooks/pre_celaction_registers.py | 73 ++++++++++++------- 3 files changed, 80 insertions(+), 27 deletions(-) create mode 100644 openpype/hosts/celaction/addon.py diff --git a/openpype/hosts/celaction/__init__.py b/openpype/hosts/celaction/__init__.py index e69de29bb2..8983d48d7d 100644 --- a/openpype/hosts/celaction/__init__.py +++ b/openpype/hosts/celaction/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + CELACTION_ROOT_DIR, + CelactionAddon, +) + + +__all__ = ( + "CELACTION_ROOT_DIR", + "CelactionAddon", +) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py new file mode 100644 index 0000000000..c6d30935a1 --- /dev/null +++ b/openpype/hosts/celaction/addon.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule, IHostAddon + +CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class CelactionAddon(OpenPypeModule, IHostAddon): + name = "celaction" + host_name = "celaction" + + def initialize(self, 
module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".scn"] diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_registers.py index e49e66f163..84ac3d130a 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_registers.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_registers.py @@ -27,17 +27,24 @@ class CelactionPrelaunchHook(PreLaunchHook): app = "celaction_publish" # setting output parameters - path = r"Software\CelAction\CelAction2D\User Settings" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) + path_user_settings = "\\".join([ + "Software", "CelAction", "CelAction2D", "User Settings" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings) hKey = winreg.OpenKey( - winreg.HKEY_CURRENT_USER, - "Software\\CelAction\\CelAction2D\\User Settings", 0, - winreg.KEY_ALL_ACCESS) + winreg.HKEY_CURRENT_USER, path_user_settings, 0, + winreg.KEY_ALL_ACCESS + ) - # TODO: this will need to be checked more thoroughly - pype_exe = os.getenv("OPENPYPE_EXECUTABLE") + openpype_executable = os.getenv("OPENPYPE_EXECUTABLE") - winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe) + winreg.SetValueEx( + hKey, + "SubmitAppTitle", + 0, + winreg.REG_SZ, + openpype_executable + ) parameters = [ "launch", @@ -53,33 +60,45 @@ class CelactionPrelaunchHook(PreLaunchHook): "--resolutionHeight *Y*", # "--programDir \"'*PROGPATH*'\"" ] - winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters)) + winreg.SetValueEx( + hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, + " ".join(parameters) + ) # setting resolution parameters - path = r"Software\CelAction\CelAction2D\User Settings\Dialogs" - path += r"\SubmitOutput" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + path_submit = "\\".join([ + path_user_settings, "Dialogs", "SubmitOutput" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_submit, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) # making sure message dialogs don't appear when overwriting - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\OverwriteScene" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + path_overwrite_scene = "\\".join([ + path_user_settings, "Messages", "OverwriteScene" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6) winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\SceneSaved" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + # set scane as not saved + path_scene_saved = "\\".join([ + path_user_settings, 
"Messages", "SceneSaved" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_scene_saved, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1) winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) @@ -90,11 +109,11 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - pype_celaction_dir = os.path.dirname(os.path.dirname( + openpype_celaction_dir = os.path.dirname(os.path.dirname( os.path.abspath(celaction.__file__) )) template_path = os.path.join( - pype_celaction_dir, + openpype_celaction_dir, "resources", "celaction_template_scene.scn" ) From c3b7e3269544d3471c02e193acd4054b5a08eb08 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:30:49 +0100 Subject: [PATCH 267/409] skip turning on/off of autosync --- .../publish/integrate_hierarchy_ftrack.py | 43 +------------------ 1 file changed, 2 insertions(+), 41 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index fa7a89050c..6bae922d94 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -8,9 +8,6 @@ import pyblish.api from openpype.client import get_asset_by_id from openpype.lib import filter_profiles - -# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` -CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" @@ -97,18 +94,9 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.task_types = self.get_all_task_types(project) self.task_statuses = self.get_task_statuses(project) - # disable termporarily ftrack project's autosyncing - if auto_sync_state: - self.auto_sync_off(project) + # import ftrack hierarchy + self.import_to_ftrack(project_name, hierarchy_context) - try: - # import ftrack hierarchy - self.import_to_ftrack(project_name, hierarchy_context) - except Exception: - raise - finally: - if auto_sync_state: - self.auto_sync_on(project) def import_to_ftrack(self, project_name, input_data, parent=None): # Prequery hiearchical custom attributes @@ -381,33 +369,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return entity - def auto_sync_off(self, project): - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False - - self.log.info("Ftrack autosync swithed off") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - def auto_sync_on(self, project): - - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True - - self.log.info("Ftrack autosync swithed on") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - def _get_active_assets(self, context): """ Returns only asset dictionary. 
Usually the last part of deep dictionary which From 635c662a8c357c5170aadfb9197081a16c27c3b2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:32:04 +0100 Subject: [PATCH 268/409] raise known publish error if project in ftrack was not found --- .../publish/integrate_hierarchy_ftrack.py | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 6bae922d94..8b0e4ab62d 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -7,6 +7,7 @@ import pyblish.api from openpype.client import get_asset_by_id from openpype.lib import filter_profiles +from openpype.pipeline import KnownPublishError CUST_ATTR_GROUP = "openpype" @@ -16,7 +17,6 @@ CUST_ATTR_GROUP = "openpype" def get_pype_attr(session, split_hierarchical=True): custom_attributes = [] hier_custom_attributes = [] - # TODO remove deprecated "avalon" group from query cust_attrs_query = ( "select id, entity_type, object_type_id, is_hierarchical, default" " from CustomAttributeConfiguration" @@ -76,19 +76,25 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): create_task_status_profiles = [] def process(self, context): - self.context = context - if "hierarchyContext" not in self.context.data: + if "hierarchyContext" not in context.data: return hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - session = self.context.data["ftrackSession"] - project_name = self.context.data["projectEntity"]["name"] - query = 'Project where full_name is "{}"'.format(project_name) - project = session.query(query).one() - auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC] + session = context.data["ftrackSession"] + project_name = context.data["projectName"] + project = session.query( + 'select id, full_name from Project where full_name is "{}"'.format( + project_name + ) + ).first() + if not project: + raise KnownPublishError( + "Project \"{}\" was not found on ftrack.".format(project_name) + ) + self.context = context self.session = session self.ft_project = project self.task_types = self.get_all_task_types(project) From 5a0cc527325642c9871323a6aba8c263be72d194 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:34:02 +0100 Subject: [PATCH 269/409] implemented helper methods to query information we need from ftrack --- .../publish/integrate_hierarchy_ftrack.py | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 8b0e4ab62d..02946f813f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -103,6 +103,129 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # import ftrack hierarchy self.import_to_ftrack(project_name, hierarchy_context) + def query_ftrack_entitites(self, session, ft_project): + project_id = ft_project["id"] + entities = session.query(( + "select id, name, parent_id" + " from TypedContext where project_id is \"{}\"" + ).format(project_id)).all() + + entities_by_id = {} + entities_by_parent_id = collections.defaultdict(list) + for entity in entities: + 
entities_by_id[entity["id"]] = entity + parent_id = entity["parent_id"] + entities_by_parent_id[parent_id].append(entity) + + ftrack_hierarchy = [] + ftrack_id_queue = collections.deque() + ftrack_id_queue.append((project_id, ftrack_hierarchy)) + while ftrack_id_queue: + item = ftrack_id_queue.popleft() + ftrack_id, parent_list = item + if ftrack_id == project_id: + entity = ft_project + name = entity["full_name"] + else: + entity = entities_by_id[ftrack_id] + name = entity["name"] + + children = [] + parent_list.append({ + "name": name, + "low_name": name.lower(), + "entity": entity, + "children": children, + }) + for child in entities_by_parent_id[ftrack_id]: + ftrack_id_queue.append((child["id"], children)) + return ftrack_hierarchy + + def find_matching_ftrack_entities( + self, hierarchy_context, ftrack_hierarchy + ): + walk_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + walk_queue.append( + (entity_name, entity_data, ftrack_hierarchy) + ) + + matching_ftrack_entities = [] + while walk_queue: + item = walk_queue.popleft() + entity_name, entity_data, ft_children = item + matching_ft_child = None + for ft_child in ft_children: + if ft_child["low_name"] == entity_name.lower(): + matching_ft_child = ft_child + break + + if matching_ft_child is None: + continue + + entity = matching_ft_child["entity"] + entity_data["ft_entity"] = entity + matching_ftrack_entities.append(entity) + + hierarchy_children = entity_data.get("childs") + if not hierarchy_children: + continue + + for child_name, child_data in hierarchy_children.items(): + walk_queue.append( + (child_name, child_data, matching_ft_child["children"]) + ) + return matching_ftrack_entities + + def query_custom_attribute_values(self, session, entities, hier_attrs): + attr_ids = { + attr["id"] + for attr in hier_attrs + } + entity_ids = { + entity["id"] + for entity in entities + } + output = { + entity_id: {} + for entity_id in entity_ids + } + if not attr_ids or not entity_ids: + return {} + + joined_attr_ids = ",".join( + ['"{}"'.format(attr_id) for attr_id in attr_ids] + ) + + # Query values in chunks + chunk_size = int(5000 / len(attr_ids)) + # Make sure entity_ids is `list` for chunk selection + entity_ids = list(entity_ids) + results = [] + for idx in range(0, len(entity_ids), chunk_size): + joined_entity_ids = ",".join([ + '"{}"'.format(entity_id) + for entity_id in entity_ids[idx:idx + chunk_size] + ]) + results.extend( + session.query( + ( + "select value, entity_id, configuration_id" + " from CustomAttributeValue" + " where entity_id in ({}) and configuration_id in ({})" + ).format( + joined_entity_ids, + joined_attr_ids + ) + ).all() + ) + + for result in results: + attr_id = result["configuration_id"] + entity_id = result["entity_id"] + output[entity_id][attr_id] = result["value"] + + return output def import_to_ftrack(self, project_name, input_data, parent=None): # Prequery hiearchical custom attributes From a78ef54e56e7a0a0300fdc140ec40fb1be4111e9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:35:02 +0100 Subject: [PATCH 270/409] query user at the start of import method instead of requerying it again --- .../publish/integrate_hierarchy_ftrack.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 02946f813f..5d30b9bf7b 100644 --- 
a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -234,6 +234,16 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): attr["key"]: attr for attr in hier_custom_attributes } + # Query user entity (for comments) + user = self.session.query( + "User where username is \"{}\"".format(self.session.api_user) + ).first() + if not user: + self.log.warning( + "Was not able to query current User {}".format( + self.session.api_user + ) + ) # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] @@ -364,25 +374,18 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # Create notes. - user = self.session.query( - "User where username is \"{}\"".format(self.session.api_user) - ).first() - if user: - for comment in entity_data.get("comments", []): + entity_comments = entity_data.get("comments") + if user and entity_comments: + for comment in entity_comments: entity.create_note(comment, user) - else: - self.log.warning( - "Was not able to query current User {}".format( - self.session.api_user - ) - ) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) + + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + self.session._configure_locations() + six.reraise(tp, value, tb) # Import children. if 'childs' in entity_data: From 36afd8aa7c3a9a88001c20f6c0ae8c616a2bf51a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:36:21 +0100 Subject: [PATCH 271/409] import to ftrack is not recursion based but queue based method --- .../publish/integrate_hierarchy_ftrack.py | 26 ++++++++++++++----- 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 5d30b9bf7b..12e89a1884 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -227,7 +227,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return output - def import_to_ftrack(self, project_name, input_data, parent=None): + def import_to_ftrack(self, project_name, hierarchy_context): # Prequery hiearchical custom attributes hier_custom_attributes = get_pype_attr(self.session)[1] hier_attr_by_key = { @@ -247,8 +247,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] - for entity_name in input_data: - entity_data = input_data[entity_name] + # Use queue of hierarchy items to process + import_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + import_queue.append( + (entity_name, entity_data, None) + ) + + while import_queue: + item = import_queue.popleft() + entity_name, entity_data, parent = item + entity_type = entity_data['entity_type'] self.log.debug(entity_data) self.log.debug(entity_type) @@ -388,9 +397,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): six.reraise(tp, value, tb) # Import children. 
- if 'childs' in entity_data: - self.import_to_ftrack( - project_name, entity_data['childs'], entity) + children = entity_data.get("childs") + if not children: + continue + + for entity_name, entity_data in children.items(): + import_queue.append( + (entity_name, entity_data, entity) + ) def create_links(self, project_name, entity_data, entity): # Clear existing links. From 5de422dea2c294bcc1ff097c272180b272e89e8a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 16:38:04 +0100 Subject: [PATCH 272/409] change how custom attributes are filled on entities and how entities are created --- .../publish/integrate_hierarchy_ftrack.py | 156 +++++++++--------- 1 file changed, 82 insertions(+), 74 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index 12e89a1884..046dfd9ad8 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -229,10 +229,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): def import_to_ftrack(self, project_name, hierarchy_context): # Prequery hiearchical custom attributes - hier_custom_attributes = get_pype_attr(self.session)[1] + hier_attrs = get_pype_attr(self.session)[1] hier_attr_by_key = { attr["key"]: attr - for attr in hier_custom_attributes + for attr in hier_attrs } # Query user entity (for comments) user = self.session.query( @@ -244,6 +244,19 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): self.session.api_user ) ) + + # Query ftrack hierarchy with parenting + ftrack_hierarchy = self.query_ftrack_entitites( + self.session, self.ft_project) + + # Fill ftrack entities to hierarchy context + # - there is no need to query entities again + matching_entities = self.find_matching_ftrack_entities( + hierarchy_context, ftrack_hierarchy) + # Query custom attribute values of each entity + custom_attr_values_by_id = self.query_custom_attribute_values( + self.session, matching_entities, hier_attrs) + # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] @@ -260,75 +273,87 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): entity_type = entity_data['entity_type'] self.log.debug(entity_data) - self.log.debug(entity_type) - if entity_type.lower() == 'project': - entity = self.ft_project - - elif self.ft_project is None or parent is None: + entity = entity_data.get("ft_entity") + if entity is None and entity_type.lower() == "project": raise AssertionError( "Collected items are not in right order!" 
) - # try to find if entity already exists - else: - query = ( - 'TypedContext where name is "{0}" and ' - 'project_id is "{1}"' - ).format(entity_name, self.ft_project["id"]) - try: - entity = self.session.query(query).one() - except Exception: - entity = None - # Create entity if not exists if entity is None: - entity = self.create_entity( - name=entity_name, - type=entity_type, - parent=parent - ) + entity = self.session.create(entity_type, { + "name": entity_name, + "parent": parent + }) + entity_data["ft_entity"] = entity + # self.log.info('entity: {}'.format(dict(entity))) # CUSTOM ATTRIBUTES - custom_attributes = entity_data.get('custom_attributes', []) - instances = [ - instance - for instance in self.context - if instance.data.get("asset") == entity["name"] - ] + custom_attributes = entity_data.get('custom_attributes', {}) + instances = [] + for instance in self.context: + instance_asset_name = instance.data.get("asset") + if ( + instance_asset_name + and instance_asset_name.lower() == entity["name"].lower() + ): + instances.append(instance) for instance in instances: instance.data["ftrackEntity"] = entity - for key in custom_attributes: + for key, cust_attr_value in custom_attributes.items(): + if cust_attr_value is None: + continue + hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical if not hier_attr: - assert (key in entity['custom_attributes']), ( - 'Missing custom attribute key: `{0}` in attrs: ' - '`{1}`'.format(key, entity['custom_attributes'].keys()) + if key not in entity["custom_attributes"]: + raise KnownPublishError(( + "Missing custom attribute in ftrack with name '{}'" + ).format(key)) + + entity['custom_attributes'][key] = cust_attr_value + continue + + attr_id = hier_attr["id"] + entity_values = custom_attr_values_by_id.get(entity["id"], {}) + # New value is defined by having id in values + # - it can be set to 'None' (ftrack allows that using API) + is_new_value = attr_id not in entity_values + attr_value = entity_values.get(attr_id) + + # Use ftrack operations method to set hiearchical + # attribute value. + # - this is because there may be non hiearchical custom + # attributes with different properties + entity_key = collections.OrderedDict(( + ("configuration_id", hier_attr["id"]), + ("entity_id", entity["id"]) + )) + op = None + if is_new_value: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": cust_attr_value} ) - entity['custom_attributes'][key] = custom_attributes[key] - - else: - # Use ftrack operations method to set hiearchical - # attribute value. 
- # - this is because there may be non hiearchical custom - # attributes with different properties - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = hier_attr["id"] - entity_key["entity_id"] = entity["id"] - self.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", - entity_key, - "value", - ftrack_api.symbol.NOT_SET, - custom_attributes[key] - ) + elif attr_value != cust_attr_value: + op = ftrack_api.operation.UpdateEntityOperation( + "CustomAttributeValue", + entity_key, + "value", + attr_value, + cust_attr_value ) + if op is not None: + self.session.recorded_operations.push(op) + + if self.session.recorded_operations: try: self.session.commit() except Exception: @@ -342,7 +367,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): for instance in instances: task_name = instance.data.get("task") if task_name: - instances_by_task_name[task_name].append(instance) + instances_by_task_name[task_name.lower()].append(instance) tasks = entity_data.get('tasks', []) existing_tasks = [] @@ -500,21 +525,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): return task - def create_entity(self, name, type, parent): - entity = self.session.create(type, { - 'name': name, - 'parent': parent - }) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - return entity - def _get_active_assets(self, context): """ Returns only asset dictionary. Usually the last part of deep dictionary which @@ -536,19 +546,17 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] - active_assets = [] + active_assets = set() # filter only the active publishing insatnces for instance in context: if instance.data.get("publish") is False: continue - if not instance.data.get("asset"): - continue - - active_assets.append(instance.data["asset"]) + asset_name = instance.data.get("asset") + if asset_name: + active_assets.add(asset_name) # remove duplicity in list - active_assets = list(set(active_assets)) - self.log.debug("__ active_assets: {}".format(active_assets)) + self.log.debug("__ active_assets: {}".format(list(active_assets))) return get_pure_hierarchy_data(hierarchy_context) From 27e4985d488401e944a44c7b705f36f407f985e7 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:04:54 +0000 Subject: [PATCH 273/409] Extract Alembic animation from Blender --- .../plugins/publish/extract_abc_animation.py | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 openpype/hosts/blender/plugins/publish/extract_abc_animation.py diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py new file mode 100644 index 0000000000..80f7a4ba58 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -0,0 +1,75 @@ +import os + +import bpy + +from openpype.pipeline import publish +from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY + + +class ExtractAnimationABC(publish.Extractor): + """Extract as ABC.""" + + label = "Extract Animation ABC" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.abc" 
+ filepath = os.path.join(stagingdir, filename) + + context = bpy.context + scene = context.scene + view_layer = context.view_layer + + # Perform extraction + self.log.info("Performing extraction..") + + plugin.deselect_all() + + selected = [] + asset_group = None + + objects = [] + for obj in instance: + if isinstance(obj, bpy.types.Collection): + for child in obj.all_objects: + objects.append(child) + for obj in objects: + children = [o for o in bpy.data.objects if o.parent == obj] + for child in children: + objects.append(child) + + for obj in objects: + obj.select_set(True) + selected.append(obj) + + context = plugin.create_blender_context( + active=asset_group, selected=selected) + + # We export the abc + bpy.ops.wm.alembic_export( + context, + filepath=filepath, + selected=True, + flatten=False + ) + + plugin.deselect_all() + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, representation) From 36effdce72a3f6d4e82ce9150f4c61425af2fa49 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:12:34 +0000 Subject: [PATCH 274/409] Hound fixes --- .../hosts/blender/plugins/publish/extract_abc_animation.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py index 80f7a4ba58..e141ccaa44 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -4,7 +4,6 @@ import bpy from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractAnimationABC(publish.Extractor): @@ -22,8 +21,6 @@ class ExtractAnimationABC(publish.Extractor): filepath = os.path.join(stagingdir, filename) context = bpy.context - scene = context.scene - view_layer = context.view_layer # Perform extraction self.log.info("Performing extraction..") From 484a77a3adcebf046bba1b2eac0c915abd631213 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 18:11:37 +0100 Subject: [PATCH 275/409] integrate ftrack api always create new session for each instance processing --- .../plugins/publish/integrate_ftrack_api.py | 56 ++++++++++++++++--- 1 file changed, 47 insertions(+), 9 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 159e60024d..1d65a53a4a 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -36,10 +36,42 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): return context = instance.context - session = context.data["ftrackSession"] + task_entity, parent_entity = self.get_instance_entities( + instance, context) + if parent_entity is None: + self.log.info(( + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." 
+ ).format(str(instance))) + return + context_session = context.data["ftrackSession"] + ftrack_api = context.data["ftrackPythonModule"] + # Create new session for uploading + # - this was added to prevent failed uploads due to connection lost + # it is possible it won't fix the issue and potentially make it worse + # in that case new session should not be created and should not be + # closed at the end. + # - also rename variable 'context_session' -> 'session' + session = ftrack_api.Session( + context_session.server_url, + context_session.api_key, + context_session.api_user, + auto_connect_event_hub=False, + ) + try: + self.integrate_to_ftrack( + session, + instance, + task_entity, + parent_entity, + component_list + ) + finally: + session.close() + + def get_instance_entities(self, instance, context): parent_entity = None - default_asset_name = None # If instance has set "ftrackEntity" or "ftrackTask" then use them from # instance. Even if they are set to None. If they are set to None it # has a reason. (like has different context) @@ -52,15 +84,21 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity = context.data.get("ftrackEntity") if task_entity: - default_asset_name = task_entity["name"] parent_entity = task_entity["parent"] - if parent_entity is None: - self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." - ).format(str(instance))) - return + return task_entity, parent_entity + + def integrate_to_ftrack( + self, + session, + instance, + task_entity, + parent_entity, + component_list + ): + default_asset_name = None + if task_entity: + default_asset_name = task_entity["name"] if not default_asset_name: default_asset_name = parent_entity["name"] From f821337a8ee552aa91d5c7d7275c3a1d82bd3c24 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 18:56:17 +0100 Subject: [PATCH 276/409] don't create and close existing connections --- .../plugins/publish/integrate_ftrack_api.py | 52 +++++-------------- 1 file changed, 13 insertions(+), 39 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 1d65a53a4a..231bd8e81e 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -40,25 +40,15 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): instance, context) if parent_entity is None: self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." ).format(str(instance))) return - context_session = context.data["ftrackSession"] - ftrack_api = context.data["ftrackPythonModule"] - # Create new session for uploading - # - this was added to prevent failed uploads due to connection lost - # it is possible it won't fix the issue and potentially make it worse - # in that case new session should not be created and should not be - # closed at the end. 
- # - also rename variable 'context_session' -> 'session' - session = ftrack_api.Session( - context_session.server_url, - context_session.api_key, - context_session.api_user, - auto_connect_event_hub=False, - ) + session = context.data["ftrackSession"] + # Reset session and reconfigure locations + session.reset() + try: self.integrate_to_ftrack( session, @@ -67,8 +57,10 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity, component_list ) - finally: - session.close() + + except Exception: + session.reset() + raise def get_instance_entities(self, instance, context): parent_entity = None @@ -224,13 +216,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): self.log.info("Setting task status to \"{}\"".format(status_name)) task_entity["status"] = status - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _fill_component_locations(self, session, component_list): components_by_location_name = collections.defaultdict(list) @@ -533,13 +519,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.delete(member) del(member) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() # Reset members in memory if "members" in component_entity.keys(): @@ -655,13 +635,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) else: # Commit changes. - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _create_components(self, session, asset_versions_data_by_id): for item in asset_versions_data_by_id.values(): From 3325ee03306dcdc9a5de81f26c7c6d6e6dd16a0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 24 Nov 2022 13:18:35 +0100 Subject: [PATCH 277/409] :art: change how the instances are deleted and instance look --- openpype/hosts/houdini/api/plugin.py | 31 ++++++++++++++----- .../houdini/plugins/create/create_hda.py | 1 + 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index b5f79838d1..a1c10cd18b 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -43,7 +43,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.nodes = list() + self.nodes = [] def process(self): """This is the base functionality to create instances in Houdini @@ -181,6 +181,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): instance_node = self.create_instance_node( subset_name, "/out", node_type) + self.customize_node_look(instance_node) + instance_data["instance_node"] = instance_node.path() instance = CreatedInstance( self.family, @@ -245,15 +247,30 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): """ for instance in instances: instance_node = hou.node(instance.data.get("instance_node")) - to_delete = None - for parameter in instance_node.spareParms(): - if parameter.name() == "id" and \ - parameter.eval() == "pyblish.avalon.instance": - to_delete = parameter - instance_node.removeSpareParmTuple(to_delete) + if instance_node: + instance_node.destroy() + self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): return [ BoolDef("use_selection", 
label="Use selection") ] + + @staticmethod + def customize_node_look( + node, color=hou.Color((0.616, 0.871, 0.769)), + shape="chevron_down"): + """Set custom look for instance nodes. + + Args: + node (hou.Node): Node to set look. + color (hou.Color, Optional): Color of the node. + shape (str, Optional): Shape name of the node. + + Returns: + None + + """ + node.setUserData('nodeshape', shape) + node.setColor(color) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 590c8f97fd..4bed83c2e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -70,6 +70,7 @@ class CreateHDA(plugin.HoudiniCreator): hda_node = to_hda hda_node.setName(node_name) + self.customize_node_look(hda_node) return hda_node def create(self, subset_name, instance_data, pre_create_data): From d65eadb9b76f2f9bab0806adfcc83849c09328d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 24 Nov 2022 13:23:12 +0100 Subject: [PATCH 278/409] :bug: fix function call in argument --- openpype/hosts/houdini/api/plugin.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index a1c10cd18b..e15e27c83f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -259,7 +259,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): @staticmethod def customize_node_look( - node, color=hou.Color((0.616, 0.871, 0.769)), + node, color=None, shape="chevron_down"): """Set custom look for instance nodes. @@ -272,5 +272,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): None """ + if not color: + color = hou.Color((0.616, 0.871, 0.769)) node.setUserData('nodeshape', shape) node.setColor(color) From b91c3f5630f9e21db6361452a3fa0b732d3a759f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:20:14 +0100 Subject: [PATCH 279/409] celaction rename hook --- .../hooks/{pre_celaction_registers.py => pre_celaction_setup.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/celaction/hooks/{pre_celaction_registers.py => pre_celaction_setup.py} (100%) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py similarity index 100% rename from openpype/hosts/celaction/hooks/pre_celaction_registers.py rename to openpype/hosts/celaction/hooks/pre_celaction_setup.py From 0f4c4ea6cbaac1870aa1b379d86b50ace6053582 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:21:40 +0100 Subject: [PATCH 280/409] default launcher --- openpype/settings/defaults/system_settings/applications.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 03499a8567..7f375a0a20 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1273,7 +1273,7 @@ "variant_label": "Local", "use_python_2": false, "executables": { - "windows": [], + "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], "darwin": [], "linux": [] }, From 609beaa75abcdf7c058b7b14deac0f6997d18b12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Thu, 24 Nov 2022 14:38:54 +0100 
Subject: [PATCH 281/409] :bug: add all connections if file nodes are not connected using their "primary data" node, `listHistory` was ignoring them --- openpype/hosts/maya/plugins/publish/collect_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 157be5717b..e1adffaaaf 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -403,13 +403,13 @@ class CollectLook(pyblish.api.InstancePlugin): # history = cmds.listHistory(look_sets) history = [] for material in materials: - history.extend(cmds.listHistory(material)) + history.extend(cmds.listHistory(material, ac=True)) # handle VrayPluginNodeMtl node - see #1397 vray_plugin_nodes = cmds.ls( history, type="VRayPluginNodeMtl", long=True) for vray_node in vray_plugin_nodes: - history.extend(cmds.listHistory(vray_node)) + history.extend(cmds.listHistory(vray_node, ac=True)) # handling render attribute sets render_set_types = [ From 1b882cb7caddf5921b5e12a921f0b7c6182346b4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:47:44 +0100 Subject: [PATCH 282/409] celaction: recognize hook --- openpype/hosts/celaction/addon.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py index c6d30935a1..9158010011 100644 --- a/openpype/hosts/celaction/addon.py +++ b/openpype/hosts/celaction/addon.py @@ -11,6 +11,13 @@ class CelactionAddon(OpenPypeModule, IHostAddon): def initialize(self, module_settings): self.enabled = True + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(CELACTION_ROOT_DIR, "hooks") + ] + def add_implementation_envs(self, env, _app): # Set default values if are not already set via settings defaults = { From 1f38d061ce64729becdbd5c79abe1c3dfb30f2ca Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:48:15 +0100 Subject: [PATCH 283/409] celaciton: resolving cli to hook --- .../celaction/hooks/pre_celaction_setup.py | 31 +++++++------------ 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 84ac3d130a..cda268d669 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,8 +1,13 @@ import os import shutil +import subprocess import winreg -from openpype.lib import PreLaunchHook -from openpype.hosts.celaction import api as celaction +from openpype.lib import PreLaunchHook, get_openpype_execute_args +from openpype.hosts.celaction import api as caapi + +CELACTION_API_DIR = os.path.dirname( + os.path.abspath(caapi.__file__) +) class CelactionPrelaunchHook(PreLaunchHook): @@ -19,13 +24,6 @@ class CelactionPrelaunchHook(PreLaunchHook): if workfile_path: self.launch_context.launch_args.append(workfile_path) - project_name = self.data["project_name"] - asset_name = self.data["asset_name"] - task_name = self.data["task_name"] - - # get publish version of celaction - app = "celaction_publish" - # setting output parameters path_user_settings = "\\".join([ "Software", "CelAction", "CelAction2D", "User Settings" @@ -36,29 +34,24 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) - openpype_executable = os.getenv("OPENPYPE_EXECUTABLE") + path_to_cli = 
os.path.join(CELACTION_API_DIR, "cli.py") + subproces_args = get_openpype_execute_args("run", path_to_cli) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - openpype_executable + subprocess.list2cmdline(subproces_args) ) parameters = [ - "launch", - f"--app {app}", - f"--project {project_name}", - f"--asset {asset_name}", - f"--task {task_name}", "--currentFile \\\"\"*SCENE*\"\\\"", "--chunk 10", "--frameStart *START*", "--frameEnd *END*", "--resolutionWidth *X*", "--resolutionHeight *Y*", - # "--programDir \"'*PROGPATH*'\"" ] winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, @@ -109,9 +102,7 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - openpype_celaction_dir = os.path.dirname(os.path.dirname( - os.path.abspath(celaction.__file__) - )) + openpype_celaction_dir = os.path.dirname(CELACTION_API_DIR) template_path = os.path.join( openpype_celaction_dir, "resources", From 54bb955d8b914085582ca6061a84a6c003743982 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:57:44 +0100 Subject: [PATCH 284/409] celaction: improving setup hook --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index cda268d669..76a2e43452 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -42,10 +42,10 @@ class CelactionPrelaunchHook(PreLaunchHook): "SubmitAppTitle", 0, winreg.REG_SZ, - subprocess.list2cmdline(subproces_args) + subproces_args.pop(0) ) - parameters = [ + parameters = subproces_args + [ "--currentFile \\\"\"*SCENE*\"\\\"", "--chunk 10", "--frameStart *START*", From 00bbd4ebe1afbcdcd15668af7611947aac4ecc5c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:09:34 +0100 Subject: [PATCH 285/409] celation executable fix --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 76a2e43452..aa04c8c088 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -36,13 +36,14 @@ class CelactionPrelaunchHook(PreLaunchHook): path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) + openpype_executables = subproces_args.pop(0) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - subproces_args.pop(0) + openpype_executables ) parameters = subproces_args + [ From a3c55730c164d5b03444c7dcad46d6086ebf0c16 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:20:44 +0100 Subject: [PATCH 286/409] celaction removing unused code --- openpype/hosts/celaction/api/cli.py | 30 +++++-------------- .../celaction/hooks/pre_celaction_setup.py | 1 + 2 files changed, 9 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 88fc11cafb..7b2542ed06 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -1,6 +1,5 @@ import os import sys -import copy import argparse import pyblish.api @@ -13,10 +12,9 @@ from openpype.tools.utils import host_tools from 
openpype.pipeline import install_openpype_plugins -log = Logger.get_logger("Celaction_cli_publisher") - -publish_host = "celaction" +log = Logger.get_logger("celaction") +PUBLISH_HOST = "celaction" HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") @@ -46,28 +44,16 @@ def cli(): celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ -def _prepare_publish_environments(): - """Prepares environments based on request data.""" - env = copy.deepcopy(os.environ) +# def _prepare_publish_environments(): +# """Prepares environments based on request data.""" +# env = copy.deepcopy(os.environ) - project_name = os.getenv("AVALON_PROJECT") - asset_name = os.getenv("AVALON_ASSET") - - env["AVALON_PROJECT"] = project_name - env["AVALON_ASSET"] = asset_name - env["AVALON_TASK"] = os.getenv("AVALON_TASK") - env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR") - env["AVALON_APP"] = f"hosts.{publish_host}" - env["AVALON_APP_NAME"] = "celaction/local" - - env["PYBLISH_HOSTS"] = publish_host - - os.environ.update(env) +# os.environ.update(env) def main(): # prepare all environments - _prepare_publish_environments() + # _prepare_publish_environments() # Registers pype's Global pyblish plugins install_openpype_plugins() @@ -76,7 +62,7 @@ def main(): log.info(f"Registering path: {PUBLISH_PATH}") pyblish.api.register_plugin_path(PUBLISH_PATH) - pyblish.api.register_host(publish_host) + pyblish.api.register_host(PUBLISH_HOST) return host_tools.show_publish() diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index aa04c8c088..5662f7f38f 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -54,6 +54,7 @@ class CelactionPrelaunchHook(PreLaunchHook): "--resolutionWidth *X*", "--resolutionHeight *Y*", ] + winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, " ".join(parameters) From 3aa578047997641b304393ec613701e83c8876fe Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:31:59 +0100 Subject: [PATCH 287/409] celaction: clearing old code --- openpype/hosts/celaction/api/cli.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 7b2542ed06..e00a50cbec 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -44,17 +44,7 @@ def cli(): celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ -# def _prepare_publish_environments(): -# """Prepares environments based on request data.""" -# env = copy.deepcopy(os.environ) - -# os.environ.update(env) - - def main(): - # prepare all environments - # _prepare_publish_environments() - # Registers pype's Global pyblish plugins install_openpype_plugins() From 2f80dcc950b88581bf0b7e2c4daa37fef7f80802 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:32:36 +0100 Subject: [PATCH 288/409] celaction: fixing scene env and adding chunk --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 5662f7f38f..b14fb12797 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -47,8 +47,8 
@@ class CelactionPrelaunchHook(PreLaunchHook): ) parameters = subproces_args + [ - "--currentFile \\\"\"*SCENE*\"\\\"", - "--chunk 10", + "--currentFile *SCENE*", + "--chunk *CHUNK*", "--frameStart *START*", "--frameEnd *END*", "--resolutionWidth *X*",
From bdda909f1f319c76562d0f62881ca88b09780912 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 24 Nov 2022 15:41:26 +0100
Subject: [PATCH 289/409] celaction icon
---
 openpype/resources/app_icons/celaction.png | Bin 0 -> 4012 bytes
 .../resources/app_icons/celaction_local.png | Bin 40783 -> 0 bytes
 .../resources/app_icons/celaction_remotel.png | Bin 36400 -> 0 bytes
 3 files changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 openpype/resources/app_icons/celaction.png
 delete mode 100644 openpype/resources/app_icons/celaction_local.png
 delete mode 100644 openpype/resources/app_icons/celaction_remotel.png
diff --git a/openpype/resources/app_icons/celaction.png b/openpype/resources/app_icons/celaction.png
new file mode 100644
index 0000000000000000000000000000000000000000..86ac092365d009052c4e0351732ad1dd067329de
GIT binary patch
[base85 binary image data omitted: celaction.png (new file, 4012 bytes), celaction_local.png (deleted, 40783 bytes), celaction_remotel.png (deleted, 36400 bytes)]
Date: Thu, 24 Nov 2022 15:41:43 +0100
Subject: [PATCH 290/409] celaction rename variant to current
---
 openpype/settings/defaults/system_settings/applications.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json
index 7f375a0a20..aa1d7387d0 100644
--- a/openpype/settings/defaults/system_settings/applications.json
+++ b/openpype/settings/defaults/system_settings/applications.json
@@ -1268,7 +1268,7 @@ "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" }, "variants": { - "local": { + "current": { "enabled": true, "variant_label": "Local", "use_python_2": false,
From 0f1995e2972e526bf3f3f2c8e6c53363bd040264 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 24 Nov 2022 15:54:37 +0100
Subject: [PATCH 291/409] celaction current variant settings
---
 openpype/settings/defaults/system_settings/applications.json | 2 +-
 .../schemas/system_schema/host_settings/schema_celaction.json | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json
index aa1d7387d0..77b91037c9 100644
--- a/openpype/settings/defaults/system_settings/applications.json
+++ b/openpype/settings/defaults/system_settings/applications.json
@@ -1270,7 +1270,7 @@ "variants": { "current": { "enabled": true, - "variant_label": "Local", + "variant_label": "Current", "use_python_2": false, "executables": { "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], diff
--git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json index 82be15c3b0..b104e3bb82 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json @@ -28,8 +28,8 @@ "name": "template_host_variant", "template_data": [ { - "app_variant_label": "Local", - "app_variant": "local" + "app_variant_label": "Current", + "app_variant": "current" } ] } From 08691e257d460ce537a62680d0e3eadf212858e5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 16:13:06 +0100 Subject: [PATCH 292/409] celaction: ext added to anatomy data --- .../hosts/celaction/plugins/publish/collect_render_path.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index 9cbb0e4880..ec89fc2e35 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -21,7 +21,8 @@ class CollectRenderPath(pyblish.api.InstancePlugin): padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ "frame": f"%0{padding}d", - "representation": "png" + "representation": "png", + "ext": "png" }) anatomy_filled = anatomy.format(anatomy_data) From 72840c2805460aeb469388ef02b223b2ca98617f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 16:13:36 +0100 Subject: [PATCH 293/409] do not validate existence of maketx path after calling 'get_oiio_tools_path' --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 403b4ee6bc..df07a674dc 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -90,7 +90,7 @@ def maketx(source, destination, args, logger): maketx_path = get_oiio_tools_path("maketx") - if not os.path.exists(maketx_path): + if not maketx_path: print( "OIIO tool not found in {}".format(maketx_path)) raise AssertionError("OIIO tool not found") From 0167886c1396cbdd76ddae583e68217b9f165515 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 16:21:40 +0100 Subject: [PATCH 294/409] celaction: removing resolution from cli --- openpype/hosts/celaction/api/cli.py | 6 ------ openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 +--- .../plugins/publish/collect_celaction_instances.py | 9 ++++----- 3 files changed, 5 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index e00a50cbec..1214898e3b 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -35,12 +35,6 @@ def cli(): parser.add_argument("--frameEnd", help=("End of frame range")) - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index b14fb12797..e4a3bee5ee 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ 
b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -50,9 +50,7 @@ class CelactionPrelaunchHook(PreLaunchHook): "--currentFile *SCENE*", "--chunk *CHUNK*", "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*", + "--frameEnd *END*" ] winreg.SetValueEx( diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index 1d2d9da1af..b5f99a1416 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -52,8 +52,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "subset": subset, "label": scene_file, "family": family, - "families": [family, "ftrack"], - "representations": list() + "families": [], + "representations": [] }) # adding basic script data @@ -72,7 +72,6 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): self.log.info('Publishing Celaction workfile') # render instance - family = "render.farm" subset = f"render{task}Main" instance = context.create_instance(name=subset) # getting instance state @@ -81,8 +80,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): # add assetEntity data into instance instance.data.update({ "label": "{} - farm".format(subset), - "family": family, - "families": [family], + "family": "render.farm", + "families": [], "subset": subset }) From 31babaac5fa7c33126dad277d4e28b4ff5aef184 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:07:39 +0100 Subject: [PATCH 295/409] change how extensions are checked when finding executable --- openpype/lib/vendor_bin_utils.py | 58 +++++++++++++++++++------------- 1 file changed, 34 insertions(+), 24 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 099f9a34ba..91ba94c60e 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -70,24 +70,21 @@ def find_executable(executable): low_platform = platform.system().lower() _, ext = os.path.splitext(executable) - # Prepare variants for which it will be looked - variants = [executable] - # Add other extension variants only if passed executable does not have one - if not ext: - if low_platform == "windows": - exts = [".exe", ".ps1", ".bat"] - for ext in os.getenv("PATHEXT", "").split(os.pathsep): - ext = ext.lower() - if ext and ext not in exts: - exts.append(ext) - else: - exts = [".sh"] + # Prepare extensions to check + exts = set() + if ext: + exts.add(ext.lower()) - for ext in exts: - variant = executable + ext - if is_file_executable(variant): - return variant - variants.append(variant) + else: + # Add other possible extension variants only if passed executable + # does not have any + if low_platform == "windows": + exts |= {".exe", ".ps1", ".bat"} + for ext in os.getenv("PATHEXT", "").split(os.pathsep): + exts.add(ext.lower()) + + else: + exts |= {".sh"} # Get paths where to look for executable path_str = os.environ.get("PATH", None) @@ -97,13 +94,26 @@ def find_executable(executable): elif hasattr(os, "defpath"): path_str = os.defpath - if path_str: - paths = path_str.split(os.pathsep) - for path in paths: - for variant in variants: - filepath = os.path.abspath(os.path.join(path, variant)) - if is_file_executable(filepath): - return filepath + if not path_str: + return None + + paths = path_str.split(os.pathsep) + for path in paths: + if not os.path.isdir(path): + continue + for filename in 
os.listdir(path): + filepath = os.path.abspath(os.path.join(path, filename)) + # Filename matches executable exactly + if filename == executable and is_file_executable(filepath): + return filepath + + basename, ext = os.path.splitext(filename) + if ( + basename == executable + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath return None From 3ca4c04a158b99e77d6f18b171ababd91d02eae0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:08:45 +0100 Subject: [PATCH 296/409] added ability to fill only extension when is missing --- openpype/lib/vendor_bin_utils.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 91ba94c60e..16e2c197f9 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -60,9 +60,10 @@ def find_executable(executable): path to file. Returns: - str: Full path to executable with extension (is file). - None: When the executable was not found. + Union[str, None]: Full path to executable with extension which was + found otherwise None. """ + # Skip if passed path is file if is_file_executable(executable): return executable @@ -86,6 +87,21 @@ def find_executable(executable): else: exts |= {".sh"} + # Executable is a path but there may be missing extension + # - this can happen primarily on windows where + # e.g. "ffmpeg" should be "ffmpeg.exe" + exe_dir, exe_filename = os.path.split(executable) + if exe_dir and os.path.isdir(exe_dir): + for filename in os.listdir(exe_dir): + filepath = os.path.join(exe_dir, filename) + basename, ext = os.path.splitext(filename) + if ( + basename == exe_filename + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath + # Get paths where to look for executable path_str = os.environ.get("PATH", None) if path_str is None: @@ -114,6 +130,7 @@ def find_executable(executable): and is_file_executable(filepath) ): return filepath + return None From 453cada172b5962921af9d3dc61c64b0b379d277 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:09:16 +0100 Subject: [PATCH 297/409] change how oiio tools executables are found --- openpype/lib/vendor_bin_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 16e2c197f9..b6797dbba0 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -299,8 +299,8 @@ def get_oiio_tools_path(tool="oiiotool"): oiio_dir = get_vendor_bin_path("oiio") if platform.system().lower() == "linux": oiio_dir = os.path.join(oiio_dir, "bin") - default_path = os.path.join(oiio_dir, tool) - if _oiio_executable_validation(default_path): + default_path = find_executable(os.path.join(oiio_dir, tool)) + if default_path and _oiio_executable_validation(default_path): tool_executable_path = default_path # Look to PATH for the tool From 6925a96ee64a57ee928b39d951a85a44296469ba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:09:48 +0100 Subject: [PATCH 298/409] celaction: return back resolution override --- openpype/hosts/celaction/api/cli.py | 6 ++++++ openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 1214898e3b..e00a50cbec 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ 
-35,6 +35,12 @@ def cli(): parser.add_argument("--frameEnd", help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index e4a3bee5ee..81f77c1654 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -50,7 +50,9 @@ class CelactionPrelaunchHook(PreLaunchHook): "--currentFile *SCENE*", "--chunk *CHUNK*", "--frameStart *START*", - "--frameEnd *END*" + "--frameEnd *END*", + "--resolutionWidth *X*", + "--resolutionHeight *Y*" ] winreg.SetValueEx( From a17f516597a39402e8032bc66b5027f47b199086 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:10:07 +0100 Subject: [PATCH 299/409] celaction: deadline submitter to modules --- .../plugins/publish/submit_celaction_deadline.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) rename openpype/{hosts/celaction => modules/deadline}/plugins/publish/submit_celaction_deadline.py (95%) diff --git a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py similarity index 95% rename from openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py rename to openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea109e9445..8a3160e83d 100644 --- a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -37,13 +37,12 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): instance.data["toBeRenderedOn"] = "deadline" context = instance.context - deadline_url = ( - context.data["system_settings"] - ["modules"] - ["deadline"] - ["DEADLINE_REST_URL"] - ) - assert deadline_url, "Requires DEADLINE_REST_URL" + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + # if custom one is set in instance, use that + if instance.data.get("deadlineUrl"): + deadline_url = instance.data.get("deadlineUrl") + assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) self._comment = context.data.get("comment", "") From 66bbaf6fccce75e879f29729443531786693efab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:13:21 +0100 Subject: [PATCH 300/409] celaction: project width and height to hook --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 81f77c1654..cde3a0c723 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -19,6 +19,10 @@ class CelactionPrelaunchHook(PreLaunchHook): platforms = ["windows"] def execute(self): + project_doc = self.data["project_doc"] + width = project_doc["data"]["resolutionWidth"] + height = project_doc["data"]["resolutionHeight"] + # Add workfile path to launch arguments workfile_path = self.workfile_path() if workfile_path: @@ -70,8 +74,8 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) 
winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) - winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) + winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width) + winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height) # making sure message dialogs don't appear when overwriting path_overwrite_scene = "\\".join([ From d7e6b030fe0ac0323b332b14b2c9bb39e839312b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 25 Nov 2022 11:55:21 +0100 Subject: [PATCH 301/409] refactore extract hierarchy plugin --- .../publish/extract_hierarchy_avalon.py | 369 +++++++++++------- 1 file changed, 223 insertions(+), 146 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 6b4e5f48c5..a9c0593f9f 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,9 +1,8 @@ +import collections from copy import deepcopy import pyblish.api from openpype.client import ( - get_project, - get_asset_by_id, - get_asset_by_name, + get_assets, get_archived_assets ) from openpype.pipeline import legacy_io @@ -17,7 +16,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): families = ["clip", "shot"] def process(self, context): - # processing starts here if "hierarchyContext" not in context.data: self.log.info("skipping IntegrateHierarchyToAvalon") return @@ -25,161 +23,240 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if not legacy_io.Session: legacy_io.install() - project_name = legacy_io.active_project() hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.project = None - self.import_to_avalon(context, project_name, hierarchy_context) + project_name = context.data["projectName"] + asset_names = self.extract_asset_names(hierarchy_context) - def import_to_avalon( + asset_docs_by_name = {} + for asset_doc in get_assets(project_name, asset_names=asset_names): + name = asset_doc["name"] + asset_docs_by_name[name] = asset_doc + + archived_asset_docs_by_name = collections.defaultdict(list) + for asset_doc in get_archived_assets( + project_name, asset_names=asset_names + ): + name = asset_doc["name"] + archived_asset_docs_by_name[name].append(asset_doc) + + project_doc = None + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data, None)) + + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, entity_data, parent = item + + entity_type = entity_data["entity_type"] + if entity_type.lower() == "project": + new_parent = project_doc = self.sync_project( + context, + entity_data + ) + + else: + new_parent = self.sync_asset( + context, + name, + entity_data, + parent, + project_doc, + asset_docs_by_name, + archived_asset_docs_by_name + ) + + children = entity_data.get("childs") + if not children: + continue + + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data, new_parent)) + + def extract_asset_names(self, hierarchy_context): + """Extract all possible asset names from hierarchy context. + + Args: + hierarchy_context (Dict[str, Any]): Nested hierarchy structure. + + Returns: + Set[str]: All asset names from the hierarchy structure. 
+ """ + + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data)) + + asset_names = set() + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, data = item + if data["entity_type"].lower() != "project": + asset_names.add(name) + + children = data.get("childs") + if children: + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data)) + return asset_names + + def sync_project(self, context, entity_data): + project_doc = context.data["projectEntity"] + + if "data" not in project_doc: + project_doc["data"] = {} + current_data = project_doc["data"] + + changes = {} + entity_type = entity_data["entity_type"] + if current_data.get("entityType") != entity_type: + changes["entityType"] = entity_type + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + if key not in current_data or current_data[key] != value: + update_key = "data.{}".format(key) + changes[update_key] = value + current_data[key] = value + + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": project_doc["_id"]}, + {"$set": changes} + ) + return project_doc + + def sync_asset( self, context, - project_name, - input_data, - parent=None, + asset_name, + entity_data, + parent, + project, + asset_docs_by_name, + archived_asset_docs_by_name ): - for name in input_data: - self.log.info("input_data[name]: {}".format(input_data[name])) - entity_data = input_data[name] - entity_type = entity_data["entity_type"] + project_name = project["name"] + # Prepare data for new asset or for update comparison + data = { + "entityType": entity_data["entity_type"] + } - data = {} - data["entityType"] = entity_type + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + data[key] = value - # Custom attributes. - for k, val in entity_data.get("custom_attributes", {}).items(): - data[k] = val + data["inputs"] = entity_data.get("inputs") or [] - if entity_type.lower() != "project": - data["inputs"] = entity_data.get("inputs", []) + # Parents and visual parent are empty if parent is project + parents = [] + parent_id = None + if project["_id"] != parent["_id"]: + parent_id = parent["_id"] + # Use parent's parents as source value + parents.extend(parent["data"]["parents"]) + # Add parent's name to parents + parents.append(parent["name"]) - # Tasks. - tasks = entity_data.get("tasks", {}) - if tasks is not None or len(tasks) > 0: - data["tasks"] = tasks - parents = [] - visualParent = None - # do not store project"s id as visualParent - if self.project is not None: - if self.project["_id"] != parent["_id"]: - visualParent = parent["_id"] - parents.extend( - parent.get("data", {}).get("parents", []) - ) - parents.append(parent["name"]) - data["visualParent"] = visualParent - data["parents"] = parents + data["visualParent"] = parent_id + data["parents"] = parents - update_data = True - # Process project - if entity_type.lower() == "project": - entity = get_project(project_name) - # TODO: should be in validator? 
- assert (entity is not None), "Did not find project in DB" - - # get data from already existing project - cur_entity_data = entity.get("data") or {} - cur_entity_data.update(data) - data = cur_entity_data - - self.project = entity - # Raise error if project or parent are not set - elif self.project is None or parent is None: - raise AssertionError( - "Collected items are not in right order!" + asset_doc = asset_docs_by_name.get(asset_name) + # --- Create/Unarchive asset and end --- + if not asset_doc: + # Just use tasks from entity data as they are + # - this is different from the case when tasks are updated + data["tasks"] = entity_data.get("tasks") or {} + archived_asset_doc = None + for archived_entity in archived_asset_docs_by_name[asset_name]: + archived_parents = ( + archived_entity + .get("data", {}) + .get("parents") ) - # Else process assset - else: - entity = get_asset_by_name(project_name, name) - if entity: - # Do not override data, only update - cur_entity_data = entity.get("data") or {} - entity_tasks = cur_entity_data["tasks"] or {} + if data["parents"] == archived_parents: + archived_asset_doc = archived_entity + break - # create tasks as dict by default - if not entity_tasks: - cur_entity_data["tasks"] = entity_tasks - - new_tasks = data.pop("tasks", {}) - if "tasks" not in cur_entity_data and not new_tasks: - continue - for task_name in new_tasks: - if task_name in entity_tasks.keys(): - continue - cur_entity_data["tasks"][task_name] = new_tasks[ - task_name] - cur_entity_data.update(data) - data = cur_entity_data - else: - # Skip updating data - update_data = False - - archived_entities = get_archived_assets( - project_name, - asset_names=[name] - ) - unarchive_entity = None - for archived_entity in archived_entities: - archived_parents = ( - archived_entity - .get("data", {}) - .get("parents") - ) - if data["parents"] == archived_parents: - unarchive_entity = archived_entity - break - - if unarchive_entity is None: - # Create entity if doesn"t exist - entity = self.create_avalon_asset( - name, data - ) - else: - # Unarchive if entity was archived - entity = self.unarchive_entity(unarchive_entity, data) - - # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances( - context, - project_name, - entity + # Create entity if doesn't exist + if archived_asset_doc is None: + return self.create_avalon_asset( + asset_name, data, project ) - if update_data: - # Update entity data with input data - legacy_io.update_many( - {"_id": entity["_id"]}, - {"$set": {"data": data}} - ) + return self.unarchive_entity( + archived_asset_doc, data, project + ) - if "childs" in entity_data: - self.import_to_avalon( - context, project_name, entity_data["childs"], entity - ) + # --- Update existing asset --- + # Make sure current entity has "data" key + if "data" not in asset_doc: + asset_doc["data"] = {} + cur_entity_data = asset_doc["data"] + cur_entity_tasks = cur_entity_data.get("tasks") or {} - def unarchive_entity(self, entity, data): + # Tasks + data["tasks"] = {} + new_tasks = entity_data.get("tasks") or {} + for task_name, task_info in new_tasks.items(): + task_info = deepcopy(task_info) + if task_name in cur_entity_tasks: + src_task_info = deepcopy(cur_entity_tasks[task_name]) + src_task_info.update(task_info) + task_info = src_task_info + + data["tasks"][task_name] = task_info + + changes = {} + for key, value in data.items(): + if key not in cur_entity_data or value != cur_entity_data[key]: + update_key = "data.{}".format(key) + 
changes[update_key] = value + cur_entity_data[key] = value + + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + asset_doc + ) + + # Update asset in database if necessary + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": asset_doc["_id"]}, + {"$set": changes} + ) + return asset_doc + + def unarchive_entity(self, archived_doc, data, project): # Unarchived asset should not use same data - new_entity = { - "_id": entity["_id"], + asset_doc = { + "_id": archived_doc["_id"], "schema": "openpype:asset-3.0", - "name": entity["name"], - "parent": self.project["_id"], + "name": archived_doc["name"], + "parent": project["_id"], "type": "asset", "data": data } legacy_io.replace_one( - {"_id": entity["_id"]}, - new_entity + {"_id": archived_doc["_id"]}, + asset_doc ) - return new_entity + return asset_doc - def create_avalon_asset(self, name, data): + def create_avalon_asset(self, name, data, project): asset_doc = { "schema": "openpype:asset-3.0", "name": name, - "parent": self.project["_id"], + "parent": project["_id"], "type": "asset", "data": data } @@ -194,27 +271,27 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): project_name, asset_doc ): + asset_name = asset_doc["name"] + new_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(new_parents) + parent_name = project_name + if new_parents: + parent_name = new_parents[-1] + for instance in context: - # Skip instance if has filled asset entity - if instance.data.get("assetEntity"): + # Skip if instance asset does not match + instance_asset_name = instance.data.get("asset") + if asset_name != instance_asset_name: continue - asset_name = asset_doc["name"] - inst_asset_name = instance.data["asset"] - if asset_name == inst_asset_name: - instance.data["assetEntity"] = asset_doc + instance_asset_doc = instance.data.get("assetEntity") + # Update asset entity with new possible changes of asset document + instance.data["assetEntity"] = asset_doc - # get parenting data - parents = asset_doc["data"].get("parents") or list() - - # equire only relative parent - parent_name = project_name - if parents: - parent_name = parents[-1] - - # update avalon data on instance + # Update anatomy data if asset was not set on instance + if not instance_asset_doc: instance.data["anatomyData"].update({ - "hierarchy": "/".join(parents), + "hierarchy": hierarchy, "task": {}, "parent": parent_name }) @@ -241,7 +318,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] active_assets = [] - # filter only the active publishing insatnces + # filter only the active publishing instances for instance in context: if instance.data.get("publish") is False: continue From d92448f923a8da0e094c5b78e645c40b45d6f363 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:02:08 +0100 Subject: [PATCH 302/409] celaction: adding deadline custom plugin --- .../CelAction/CelAction.ico | Bin 0 -> 103192 bytes .../CelAction/CelAction.param | 38 ++++++ .../CelAction/CelAction.py | 121 ++++++++++++++++++ 3 files changed, 159 insertions(+) create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py diff --git 
a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico new file mode 100644 index 0000000000000000000000000000000000000000..39d61592fe1addb07ed3ef93de362370485a23b9 GIT binary patch literal 103192 zcmeHQ2V4_L7oQ+17Eb(ZC>9jYh9aJ*CI!Yvtch-C}%GyqF4^>y<&9SCuIvvd)< zyn}(Z{5E{Be(h$pp=tdg_5&PnE+#$Pwd3M5(5L>QAp?FM?UI3!p_ z9*8`qYq`0$MK^tmffk0xHhFINd+5)x4>?bta1u{GnrxB#e4a(@6_1Wb=1x7gZK{30 z4oB?j%rLKBZ~mJ$-ri`WK@-vR>td5!SF1CNKeFsoyjQHrad{on_@E2l@k07({*I8r zZGeF?9Co)mCRH~+_Q~SvPrD~HFB?@a#(9pO(Ph+?NzK;wM@JHp2Mv!q>)T;Z z#><(HHePt##ChLlCj;b?y!?EJ9ZTk>@(n{fI#2RBzwOtg57Vr5QCRv(;fPt++K;%g zvY&B^9%{~x7q!>>>xBt2pTItBtUGNxvcIv&m(p!*FmXq=j?@FEe@vh{l4@JZ@*+$18?8#QTLrLpi{|1*jAsSr?~a%;lS=?HIvnAP4M_@ zu9=2A8Zj-5;zWBs1UOD?mc7F6gHf{k@oq+eBU+@e%$Na*bEpZ;g`<6Zmn4Z&jE1K8 zHg!%ozio%;Q`DvWK{gE=n(8tw`nc_-a*jQn*r2ajbStY@)%N#XVbuHYAUhz8}v zi9X+{?c`gx+57tKHm;9o%rNgV%gxgR?XO{BJ1C;BM{v@d_p{wRx1!D0vd4~e$cTC6 z>)!K``|ro8VeAn#ZS)$Bt5##>`!pZXn?F1D{>g1cr`*Gr>@%j06LSM%wrv-WPR!#6 zj%anu7`>fk&rUEqxY%{C`~IxqsQ2HsXJu7!ePb zpKmyboztS(L)YQ#_VrCYf(==nso?0t;Vaty8q{&hy?}`Q|BJQ_)$_2_Te$Q7`Heko zkB6RW$uL-EVcTy28qhS_Yv<>lP7RaicYY8us{YAK!OksD3w!+#;L~#XEVs>i41>O{ zALkfZ#!vC%x+`+We>=F3*+M*A;_75%#j?O)Fr zwZ?5#yNL7V$8GmKdvqf^e(=)IW_B+&da@EjT!x0Zoci)<&zpmKy1f{8B1AR=n(sqX zU(UX`sYiI%{GcWlwmP?J8`s79Xwv-b?XkLbijh@GmZg*>T>xXo= zqz!(wu%2;;);*~CyVC}{xEL*TywA=*ckJmOLD5IIvyKVtqp|3nOYiWQMb6^5q_eCv ze(1kxU>4g}qvO-I);on5=CyrWjavH)3hy^h>^Eh_Y`0+zO_A-Vo{M)49dmbNdU8*% zcu_*Dr6(3MMK;44Gg6;@W<9pu;?Q>mH!f3fHte^JE1F$vojo5|G1Nm{FX-pY*je-T z-M$`EFZ_^mLRjByELvwCUT@TnMs~JI7u!U8%^r+`dpCXGEh8-*?=* z+n`g8l}w$n$p32ZklksUs$1*3OtWME`RAUG?*|#B2(#;WzjnLD8a|)_V`0Cui^G4- zYT}Ul_`(CLYvRVkR&N?~DJ6&9Ib1KwZTw)gdGov;x$EW&8a)<Q+u<%9EzvHw^!@3A-{!ht1JCb$ed+iFS_b>s(HV4?Bdz1qwA~_lRHOyb$Gkm zV}1=!!DRbZ;ifzL9zXH2{#m!ve^9@N+s5BDt~bDl5gX%Sz*?U+xbDJQu`!uzMtX*I zi<>*!Ekf7SW2(re`2e=bAV4u>BZ|p2nW=~TH*P@DUWRX5dW@U<_tsfU#(2)=&g-7m z4j6BPMC1MZhu-w-`-6^a|9~*F7^@~TmW4GGTnTKpa&G!R$(<%x1xB-n8=El(EweTc zXglrdtsKYD_CrH|Ocags>AnBsmQZV+$;g?nBd=^1j*SrAVMlq6cDb<1LYLWL-gf=| z?t^k7=d@dJ(=+(Z|Cs((+dD_M`Z@BS&Q=$fCH)bh&qz%ec5cQ^UC!~Lfu@^{beO$% zUz^0p+s`rNcVyaMMjx6uIIPO|dEdlg*N}SQp>ZD#AHH7QY4xU*G=0a(_CvDc{**&O3LfEm-DxIww5PXK&?8ZfZp?t?%lpF z#o$cy<`19c$5{8eb@4;|U&+ardfW_a#_DC;yOAI6)u*LRy%Wz*m>?a8HLPLtKZd&q z`%md@xhtz)2(>e1f=!pZCrADD)WP+YcfBUdu8^qcF?4n*ruyzI7^on->2 zK+5qy2X%UFfTp%a{YI}69567vlX7(L?JuXn){{;SyBLtIb81q?nD9p|p2=TYm0`>B(nMtog^zK%MAQ#|K!NOuEbMeX;^{$K2-Cw_sy;FHS# z`mlkXFE3B3;l|v*Hzf8)$(d)q^_lRPp=*E7*)Pqix0%Md6!LG{`+eK;1Ap0KH2)W` zx#zuuCiOekV(26o*PQp^*(iND8MV@_?cih6kOP;JaTo8{{I=w@_nv?m&WHD&p75rr zS2eG!;U=ct&n_F8>6`5^IWBK{{j=PQV@?{-um7LpIN#oe=!bAQGg0>zT{c=<_BRnbm zDC}}G*=+Oi`AIf+53!xIbWGPiF+j%?XOC;cok&HnA8$CcX;_nNvBdhat?)}UuQJrusf ze_c$H*_~&*=VxE1!jc|rpLBkAC!SlkW23yApPq4ZeQ;|ea@Z9#X|1VqORi(J(+6+A z$nS9}ePWR7jY~dD?k=yjs6P_STpVb+@Z_Vgs3uMBrT8b+zHJ}R%V=Y$%Un9~@+5R@ zJ7WF%%z0dDWbl{36%!i?lKrFfZ3kW+R!4`~b$VvtFAG1M*^@G1ot|lskz1#^`Ad_3 z`J;326kU%gBbHjcuF;^k+orc;=Ca2;NqaIr|LKia>>drC7#$sSPHH7E)9yAid3n=I zJ`EkiCNx_KLpRTQbilI*gL9pKbLP32F>Q668qWQDaa+oAvqQrB;U@X3F0!9_&v};S z9kA?Zde)9>^YblM8^-TCK7Z=RFKrDN=I5RXr?u_xbgv`-EcaBpYjgKNn*{bhYcCG6 z*%iZBdSj)y!-H8ahO->+AHF`tYxMM4OP+DPQ>I>@!?yQ1`(Vz@&_9#1@2x%X>ob#U zBHOdqFGgAGTJ+H|?y#cS@4fd#ZrRGLxxv%ok1nHlj_18yGcs?i2^gP~@B5DRAjzgj zw>Y0mm-H_*qOG>Hq0kqbKv z1wfm@@OwSHbNm9s+=Uh^FudoiD|`!WeH+Qk$TwS-32O_9c}qMSd1;lN{e!09`X}c<2DKCi=s{!nXR@f`OIp&_*UREZ5_!TjGEmjT;r2>F_t|DPfEfEd-X9iR(y(8W&B*^zRR z@TY6M`-=0#P}KbAwav?Ny% zf#TNx1&Q-0(Wekfl*9byDDTZ?igT;3(y>QTJ`z`rfHM6HUe2Pv^&{`YexiTXs#Pmj 
znf~+Bk9_Our^NhJqJOnCthx}Z)^sgG9 zs-8>qU)43NT0WwG)%aBPT%!M~u3^>k5&f&ir>f@?{a1AjtCo-GUo}2eJ(uXes%u!a zd_@1M@u}*$ME_M?!>Z*Y`d5ulRnH~*uj(3BEuXgP{|oOG@&8b*TD5YO>7O6>+c)q3 zBYf>a^sknNRTomGf9yxgkF}?;j~(^}`__p6+)=6SiT|%^T`S8iPXF5K6Ru1~l2$wd zME}KO^F8Dc{eKTyE?!2W|KhRv9&%*$&%I|%3E|xu?xUVX=2x6swIz8a%>{*Twm@A* zk@~!ternAld9j0v)Rpx8%B4vCymvb(UX+Dg`R_qznv{{&KYpJ|EKHZ&_u-HmX)cE= zo)2;#bb$9RUYB@pn4fW)Vu$M%sV_)cQes_7#HM)BRz>P^A`Gc7+=Qa_#rYKXVdui~ zn#AUNXp7cAFUC$D+tTVmzBZMgd0x2aNepsAfF#Us!*@={U{!%M65eQErZdy|cq_IG*uE^S{`xxD1O4?_vE<2~3w;kR3=vh3fx zY%oq5gWlzIPul^E2^Yvd%2n4w;2iG&x=yF{&AHp0;=;W@8}9$;G-3l)Q~!#(=Or$r zgs{#cgnM;?;2p(*_1!`nfH`5#Kd5fiMB@9C3Xkca+rsa0aigszddIpiq`WMO1L-n0 z5J7C9YS;?QoAEWjP`5l-S1HuHtPQ|=8nJ<@p>stkVEixq9tQ`W?+JzfQUZ9MSA;n6 z6z5T|LR$g5Nr3x>c8cwhxOO2>wKbtpRy$X*lF_??93 zkW~|z??);;rvp7ksG)CVV?sKy0qwUHsSP~&&juJ2=Wa8K1J6>G(tQ(ITS$jBg3j8H z!uKtK#0EmpW!eVt{l5@=zm(n=z&!%KXQ0bMh`-}3`HtK7jiPo-kJkn8dLOOVl5Btv zK9_V^VguT(dzyQR4TPdIv|jO=umF55iyH?NZ2;z!ztL@xPmAbdnG5F(+Cu?f!s&Mj{}((6m4KU-3|%07Xh#V7y4Sj20uFiE24NC_!vOp zGIV*UyHI;2C!Es=U_H`0#rowXEuy#yPD+WR@V*e#-AuPbLhVHW^f(V}AivNKWS$?D zltyeod-X2MIiA;*!v=^ynJk!E`dexPqLR*eW%)1;_z-_GEmgb>+h7CZK)-BS$6R=y zm!E#P7&!v2l{P^9$+T35uZ}* zXUa>W@O^`V4G_Q5id9jCw~lQ9t_P0_^%|}ttK*V>XX0x~g)>Pys89rmPho|^s}+rx z*9Kr5vCs-Z36sSlkQ0RTU_6)cD}{Y!RQZ+4?=wey3X4Tndoq;y6yoDnVH;j}meq&|fQ=sBS(e=;8U70~5y2-lH@#kEV)wF?2b7O3_q%+H~)ZP5CM z^1<3AA>prTAL3x@Si`K5xzMI3IlP<6inSJSMlF|eLf&f8)AV3fx z2oMAa0t5kq06~BtKoB4Z5CjMU1Ob8oL4Y7Y5FiK;1PB5I0fGQQfFM8+AP5iy2m%BF zf&f9_>mvZKx{#x^2q0t#n?$(N)x-!H=pkfF$3+O4>&nNCOBt7|uh>3(OdR)5cUSHZ z+Q)}1|0GWED6>5X+;gbc@JAo4!j=kBQtQz<1tGJheB7g&d>k_R3y&;3&}GeK=KTT6c~W>MXo(ckpZRx1E~rOuoM^&C@>&WV4z5cGJ}tV zldmpu=^2n$fg%DT1%$*3aY_ME%EMh=1x)!U^-dv9qRJ7#8(i1{c>pnxG5m+a4xt8h z3P=(s{|EvE0fGQQfFM8+AP5iy%8dZLU)U=*3^}Q!P}CN_kF5)3e*qc+@1^O%UPMMJ z>)tJR)By0#RD=J=tPoTO%6*n8n+NY` z;=MfsWzv-EKMW;S5c}nB7WCQ%`1c2jQAKdRcY(5fp$x_=0)CHxa{|jBrK%p}1p!W` zX5I^1lzI#`# za6HrqvlY+?$ex4c=fR%4nm+i?Cu@!TeAstd(}y5wA(3BtZdHKkYNScp{)zsH{gd-w zvj~v!kBtBH+)5*Zbp=AYt0%zw8norPHV3Jk~!cbOA22HH9@?`wuQ3ewktq& z8flW&KjyI_Nh8cwKqC_QnR*mAvTLFI-1}{d*Z}u_o5Fma5w$-S@SJYd$K@-xrNib#p`vMAo8UQ!I2 zQC~Q5a``yUxznfs11KlXXHog-$G&y>-?GDXDPD9RiU)g)OU5nwz8$Aa(r|hKaie~c z$b&KB+-XplUYIX)o^bCOQ*1c5pe#=##;V~)U|q60tj%o1>oc&Hy-)d(X^9BsViROti}lXWyjbwP0dXG1eKrwezyfS~m8)?rs0=b%fvj9^tUbkpH7ZQp zL}!Y9`v;5x7xJt8o|_<}4c0dhmRXT_kTzTkR9P+-0R#N}GK&p!H{P2q6z`pvgt#we zVLXJ8KdxLIXhCHd^SXen8QdqoP(qlq(cd58z1fU05CH~GS|CQbSOgi{f~@cGclU&% z+(Hca@Am=*3^C--DOU$tP#I)w2^b`CVBX6A7y#cN@Rtzx=@@!!aMJ=Y%EcmJU<4S% z0nsumh8xS(ffiJTXZnpme#y6~IB=hfzeh?F8}19iU&i}e_UF+A#PI#@dVs+_4y*&< z?{U&&qTo{~#lBSsW8eqh->IgFzP{Bq;9Mx2BMZ0*|6u_4`E0n)hy0UZD>c4V{x!iD z$X65W;0nH{$KPQQ!gqKu@pHlf1Ic>N6)00%6McQFZD1XPu1~{tP;o4z<6+oVzE%D; z!52Q)3owA|0;~(MZyWqax9VaX?%**)6McQFZQwB<#=YJ68i3~q^q9z(uI~W`?%yi! 
zn&b=Q+>C92|L8>TI(uyo;MY;j8G)_tL zDX)SKH1Z$s6JZYI1XQ?BL|d{?1i(QKf&f8)AW*3Yz`BSD&}E?eO6VM{nY4oR^SBJG z9km5NsY6Q2;kwtM>{nk~f__f`(d%1^;SR{dM5iBv->x!oI-L%$eQjj`FKblr18ezc z7LZt42AAC<{Y~>M8}9hp{DW^=HQ;9`*oC7K8VB!U<7W~BAa4lkhQx=@K}k7BSg#%q z&o8QfZT_{rKfr!IP?Cn|tNRTA-`9XBLE?N$^!a|7NSNYEp9A4vTc0%lqE9!=JYW98 zi6{J*QE>!+3Vhp%{glys@bfXr_@nLTZ+_P0GLJtZ*4;`UfABjwyl5*R>rxZvPQyy) zpB=7Kz(Y9r9N_mTXrg^Fm^J@P!@nhSQt;D|FIOqoue`TjRNjYul06{s`%+@TdrB-~ z1LabJq(u^5@%w>d@TZZa=YOK`{u5XReC@?)(sySng@61`5auoKy$|rDBYE#fEO=YM zFYqczoF|oGEW8)=u^=7Zk>bS;hIEERhP)4d<1&@OKi<>iDNwwW@b8y0aXS4uq$Max z$7SjIu4Nqu%Tf>e#Xi~TKu5u+rqI6(_S!14hZxN55m1qIWo664OKIx>&vxJ&$L6OW zrMLy}zFq?UYL&K~$|?{1`(U3>0p}?tV8>G2nEu#)gW+CFD}0-PPrOIlKR}XmQzpEn zEizzxf%{f}&V3W`F**hB=$wc5QOjGnwF-m$|?(dRtFh;xv@hi0qY*cXWgN=&nIEKdI5Q@D~qqv^b5A61N_Ra zRng}~Dy08dnzkydEcjkxzr#)JJB`HH*N_kQ#QYEY@f;@&&Ihg7SMGB#$N)ZboXD{|UU}063;I%+FUd!&aM%hs!6Ej>f>40HtsoR( z?<)w!*o_N93j2ZK5Pzu%hfLdo5I2D10%$-ILIXGyOG8SUP8}{tr{KW(>V?=(%NWQG zsL)T#fcR+v9ONhi0x(x;2lpE0GSnK<8Ui&2Y9*6y4)G4~&YH#!nB$EHlD%iZzn22x z{gd&&@C$(y^QUWy0cg87&}$&PHcHbn_vwhUM+m>q3QmY;^&6KS1X_o9ZHzI8lDdxrUWnkfOWJQlo0ms!EwCrJm=OA65k(? zUn9@1z#k2-qk6#@yB5e2$VwXG-^Ir@^xOpc-o=M!soa;d^I+{~tfs~kb=!gdC&Jog zKHeJ!_VoV(agANq)aD@+jp0Q1$rFHI0WW2Z7;tb_x2~qjf!ss5&nNQ)+!qx0#h=)| zS81wkb=v{Ce};ZJ92f^UFb=T8t8dWcJVzZqfCo!>?>z_a7sdbg1n^LE=ht3C0u0PK zk8Lt|$u1Np$^z@+5X||$>YgmnpDEz~p7&}QJfoBNG<@~%Jb~P8I8i?(bK^(iv0;=t z`qOk7z@aJd(L>YilJ>q21d7b5n6Pf)Atjti|5g;GAm=gnjq-0sSE*{2Qm0_$&y@`~b8V2;avJ zlM?=o(=a3|4=7{7e>$F>mv&f=_|F{KkY}l~GGsy->%8~7<&^=kyfi;Np4+Ka|J|;l z^*{KkU7!1~6U-S$O8f))Psd8`oACbKYWDv#|LK@ZaIbej@N%vn?4f*spD^7I$DzJS zN&ItT2HfC8cEi3{CF)99TQJ@?;=Y{wnD_dxL};U>vN9#5K^q2~du{IVUVFTO>v)-b z0-*1@oM`JXZrpHu&t0bdP@tY3=TYx1Sf-MAuB`lmwNHdt;0;wAeTMZ>7LdRlA(1UY JVj#NS{{bY%R3rcZ literal 0 HcmV?d00001 diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param new file mode 100644 index 0000000000..24c59d2005 --- /dev/null +++ b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param @@ -0,0 +1,38 @@ +[About] +Type=label +Label=About +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=Celaction Plugin for Deadline +Description=Not configurable + +[ConcurrentTasks] +Type=label +Label=ConcurrentTasks +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=True +Description=Not configurable + +[Executable] +Type=filename +Label=Executable +Category=Config +CategoryOrder=0 +CategoryIndex=0 +Description=The command executable to run +Required=false +DisableIfBlank=true + +[RenderNameSeparator] +Type=string +Label=RenderNameSeparator +Category=Config +CategoryOrder=0 +CategoryIndex=1 +Description=The separator to use for naming +Required=false +DisableIfBlank=true +Default=. diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py new file mode 100644 index 0000000000..d19adc4fb9 --- /dev/null +++ b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py @@ -0,0 +1,121 @@ +from System.Text.RegularExpressions import * + +from Deadline.Plugins import * +from Deadline.Scripting import * + +import _winreg + +###################################################################### +# This is the function that Deadline calls to get an instance of the +# main DeadlinePlugin class. 
+###################################################################### + + +def GetDeadlinePlugin(): + return CelActionPlugin() + + +def CleanupDeadlinePlugin(deadlinePlugin): + deadlinePlugin.Cleanup() + +###################################################################### +# This is the main DeadlinePlugin class for the CelAction plugin. +###################################################################### + + +class CelActionPlugin(DeadlinePlugin): + + def __init__(self): + self.InitializeProcessCallback += self.InitializeProcess + self.RenderExecutableCallback += self.RenderExecutable + self.RenderArgumentCallback += self.RenderArgument + self.StartupDirectoryCallback += self.StartupDirectory + + def Cleanup(self): + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + del self.StartupDirectoryCallback + + def GetCelActionRegistryKey(self): + # Modify registry for frame separation + path = r'Software\CelAction\CelAction2D\User Settings' + _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path) + regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, + _winreg.KEY_ALL_ACCESS) + return regKey + + def GetSeparatorValue(self, regKey): + useSeparator, _ = _winreg.QueryValueEx( + regKey, 'RenderNameUseSeparator') + separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator') + + return useSeparator, separator + + def SetSeparatorValue(self, regKey, useSeparator, separator): + _winreg.SetValueEx(regKey, 'RenderNameUseSeparator', + 0, _winreg.REG_DWORD, useSeparator) + _winreg.SetValueEx(regKey, 'RenderNameSeparator', + 0, _winreg.REG_SZ, separator) + + def InitializeProcess(self): + # Set the plugin specific settings. + self.SingleFramesOnly = False + + # Set the process specific settings. 
+ self.StdoutHandling = True + self.PopupHandling = True + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Rendering.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Wait.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Timeline Scrub.*") + + celActionRegKey = self.GetCelActionRegistryKey() + + self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault( + "RenderNameSeparator", ".").strip()) + + def RenderExecutable(self): + return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip()) + + def RenderArgument(self): + arguments = RepositoryUtils.CheckPathMapping( + self.GetPluginInfoEntry("Arguments").strip()) + arguments = arguments.replace( + "", str(self.GetStartFrame())) + arguments = arguments.replace("", str(self.GetEndFrame())) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetStartFrame()) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetEndFrame()) + arguments = arguments.replace("", "\"") + return arguments + + def StartupDirectory(self): + return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip() + + def ReplacePaddedFrame(self, arguments, pattern, frame): + frameRegex = Regex(pattern) + while True: + frameMatch = frameRegex.Match(arguments) + if frameMatch.Success: + paddingSize = int(frameMatch.Groups[1].Value) + if paddingSize > 0: + padding = StringUtils.ToZeroPaddedString( + frame, paddingSize, False) + else: + padding = str(frame) + arguments = arguments.replace( + frameMatch.Groups[0].Value, padding) + else: + break + + return arguments From 4a9b214d8daf7b93634248518572659dc3094001 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 25 Nov 2022 12:12:58 +0100 Subject: [PATCH 303/409] fix sync of asset docs on instances --- .../plugins/publish/extract_hierarchy_avalon.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index a9c0593f9f..b2a6adc210 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -59,7 +59,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): else: new_parent = self.sync_asset( - context, name, entity_data, parent, @@ -67,6 +66,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): asset_docs_by_name, archived_asset_docs_by_name ) + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + new_parent + ) children = entity_data.get("childs") if not children: @@ -132,7 +137,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): def sync_asset( self, - context, asset_name, entity_data, parent, @@ -140,7 +144,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): asset_docs_by_name, archived_asset_docs_by_name ): - project_name = project["name"] # Prepare data for new asset or for update comparison data = { "entityType": entity_data["entity_type"] @@ -219,13 +222,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): changes[update_key] = value cur_entity_data[key] = value - # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances( - context, - project_name, - asset_doc - ) - # Update asset in database if necessary if changes: # Update entity data with input data From 04ac17455790ffdb6275ae31d0ec4bb4f595732e Mon Sep 17 00:00:00 2001 From: 
Jakub Jezek Date: Fri, 25 Nov 2022 12:30:31 +0100 Subject: [PATCH 304/409] adding .eslintignore file for deadline plugin --- openpype/hosts/celaction/resources/.eslintignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 openpype/hosts/celaction/resources/.eslintignore diff --git a/openpype/hosts/celaction/resources/.eslintignore b/openpype/hosts/celaction/resources/.eslintignore new file mode 100644 index 0000000000..17a45423da --- /dev/null +++ b/openpype/hosts/celaction/resources/.eslintignore @@ -0,0 +1 @@ +deadline_custom_plugin/CelAction/* \ No newline at end of file From 86608dd106f2421062a8746b85caa89a729cb714 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:40:53 +0100 Subject: [PATCH 305/409] trying better flake8 ignore --- openpype/hosts/celaction/resources/.eslintignore | 1 - openpype/hosts/celaction/resources/.flake8 | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) delete mode 100644 openpype/hosts/celaction/resources/.eslintignore create mode 100644 openpype/hosts/celaction/resources/.flake8 diff --git a/openpype/hosts/celaction/resources/.eslintignore b/openpype/hosts/celaction/resources/.eslintignore deleted file mode 100644 index 17a45423da..0000000000 --- a/openpype/hosts/celaction/resources/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -deadline_custom_plugin/CelAction/* \ No newline at end of file diff --git a/openpype/hosts/celaction/resources/.flake8 b/openpype/hosts/celaction/resources/.flake8 new file mode 100644 index 0000000000..6cc095c684 --- /dev/null +++ b/openpype/hosts/celaction/resources/.flake8 @@ -0,0 +1,3 @@ +[flake8] +exclude = + */deadline_custom_plugin/CelAction \ No newline at end of file From f8e3e99aad1af2c4571725f6be7c9617a4768218 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:42:29 +0100 Subject: [PATCH 306/409] flake8 ignore almost done --- setup.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 0a9664033d..a721b8e9f6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,8 @@ exclude = docs, */vendor, website, - openpype/vendor + openpype/vendor, + */deadline_custom_plugin/CelAction max-complexity = 30 From ceabbe0de37a09915661456dd073ab55a98f8deb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:44:34 +0100 Subject: [PATCH 307/409] flake8 file clearing --- openpype/hosts/celaction/resources/.flake8 | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 openpype/hosts/celaction/resources/.flake8 diff --git a/openpype/hosts/celaction/resources/.flake8 b/openpype/hosts/celaction/resources/.flake8 deleted file mode 100644 index 6cc095c684..0000000000 --- a/openpype/hosts/celaction/resources/.flake8 +++ /dev/null @@ -1,3 +0,0 @@ -[flake8] -exclude = - */deadline_custom_plugin/CelAction \ No newline at end of file From 6cefb2ec4b63744b0bed508b3d9671d69e022c57 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 15:01:14 +0100 Subject: [PATCH 308/409] celaction: deadline and settings improvements --- .../plugins/publish/collect_render_path.py | 26 +++++------ .../publish/submit_celaction_deadline.py | 5 +-- .../defaults/project_settings/celaction.json | 12 ++--- .../defaults/project_settings/deadline.json | 9 ++++ .../schema_project_celaction.json | 39 ++++------------ .../schema_project_deadline.json | 45 +++++++++++++++++++ 6 files changed, 82 insertions(+), 54 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py 
index ec89fc2e35..e5871f8792 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -11,29 +11,29 @@ class CollectRenderPath(pyblish.api.InstancePlugin): families = ["render.farm"] # Presets - anatomy_render_key = None - publish_render_metadata = None + output_extension = "png" + anatomy_template_key_render_files = None + anatomy_template_key_metadata = None def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy_data["family"] = "render" - padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ - "frame": f"%0{padding}d", - "representation": "png", - "ext": "png" + "family": "render", + "representation": self.output_extension, + "ext": self.output_extension }) anatomy_filled = anatomy.format(anatomy_data) # get anatomy rendering keys - anatomy_render_key = self.anatomy_render_key or "render" - publish_render_metadata = self.publish_render_metadata or "render" + r_anatomy_key = self.anatomy_template_key_render_files + m_anatomy_key = self.anatomy_template_key_metadata # get folder and path for rendering images from celaction - render_dir = anatomy_filled[anatomy_render_key]["folder"] - render_path = anatomy_filled[anatomy_render_key]["path"] + render_dir = anatomy_filled[r_anatomy_key]["folder"] + render_path = anatomy_filled[r_anatomy_key]["path"] + self.log.debug("__ render_path: `{}`".format(render_path)) # create dir if it doesnt exists try: @@ -47,9 +47,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): instance.data["path"] = render_path # get anatomy for published renders folder path - if anatomy_filled.get(publish_render_metadata): + if anatomy_filled.get(m_anatomy_key): instance.data["publishRenderMetadataFolder"] = anatomy_filled[ - publish_render_metadata]["folder"] + m_anatomy_key]["folder"] self.log.info("Metadata render path: `{}`".format( instance.data["publishRenderMetadataFolder"] )) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 8a3160e83d..e39c2c0061 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -7,11 +7,10 @@ import requests import pyblish.api -class ExtractCelactionDeadline(pyblish.api.InstancePlugin): +class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". + Renders are submitted to a Deadline Web Service. 
""" diff --git a/openpype/settings/defaults/project_settings/celaction.json b/openpype/settings/defaults/project_settings/celaction.json index a4a321fb27..dbe5625f06 100644 --- a/openpype/settings/defaults/project_settings/celaction.json +++ b/openpype/settings/defaults/project_settings/celaction.json @@ -1,13 +1,9 @@ { "publish": { - "ExtractCelactionDeadline": { - "enabled": true, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10 + "CollectRenderPath": { + "output_extension": "png", + "anatomy_template_key_render_files": "render", + "anatomy_template_key_metadata": "render" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index a6e7b4a94a..8e892bb67e 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -70,6 +70,15 @@ "department": "", "multiprocess": true }, + "CelactionSubmitDeadline": { + "enabled": true, + "deadline_department": "", + "deadline_priority": 50, + "deadline_pool": "", + "deadline_pool_secondary": "", + "deadline_group": "", + "deadline_chunk_size": 10 + }, "ProcessSubmittedJobOnFarm": { "enabled": true, "deadline_department": "", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json index 500e5b2298..15d9350c84 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json @@ -14,45 +14,24 @@ { "type": "dict", "collapsible": true, - "checkbox_key": "enabled", - "key": "ExtractCelactionDeadline", - "label": "ExtractCelactionDeadline", + "key": "CollectRenderPath", + "label": "CollectRenderPath", "is_group": true, "children": [ { - "type": "boolean", - "key": "enabled", - "label": "Enabled" + "type": "text", + "key": "output_extension", + "label": "Output render file extension" }, { "type": "text", - "key": "deadline_department", - "label": "Deadline apartment" - }, - { - "type": "number", - "key": "deadline_priority", - "label": "Deadline priority" + "key": "anatomy_template_key_render_files", + "label": "Anatomy template key: render files" }, { "type": "text", - "key": "deadline_pool", - "label": "Deadline pool" - }, - { - "type": "text", - "key": "deadline_pool_secondary", - "label": "Deadline pool (secondary)" - }, - { - "type": "text", - "key": "deadline_group", - "label": "Deadline Group" - }, - { - "type": "number", - "key": "deadline_chunk_size", - "label": "Deadline Chunk size" + "key": "anatomy_template_key_metadata", + "label": "Anatomy template key: metadata job file" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index cd1741ba8b..77d520c54a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -387,6 +387,51 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CelactionSubmitDeadline", + "label": "CelactionSubmitDeadline", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": 
"Enabled" + }, + { + "type": "text", + "key": "deadline_department", + "label": "Deadline apartment" + }, + { + "type": "number", + "key": "deadline_priority", + "label": "Deadline priority" + }, + { + "type": "text", + "key": "deadline_pool", + "label": "Deadline pool" + }, + { + "type": "text", + "key": "deadline_pool_secondary", + "label": "Deadline pool (secondary)" + }, + { + "type": "text", + "key": "deadline_group", + "label": "Deadline Group" + }, + { + "type": "number", + "key": "deadline_chunk_size", + "label": "Deadline Chunk size" + } + ] + }, { "type": "dict", "collapsible": true, From ecbdac09e21f0f186d78cbb04c8030bbf1588e82 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:14:45 +0100 Subject: [PATCH 309/409] celaction: add local render target --- openpype/hosts/celaction/api/cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index e00a50cbec..4c07374b08 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -53,6 +53,7 @@ def main(): pyblish.api.register_plugin_path(PUBLISH_PATH) pyblish.api.register_host(PUBLISH_HOST) + pyblish.api.register_target("local") return host_tools.show_publish() From 6994e48a716e80a8b7cdb344d609826b0874b447 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:15:09 +0100 Subject: [PATCH 310/409] celaction: returning frame to anatomy data --- openpype/hosts/celaction/plugins/publish/collect_render_path.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index e5871f8792..f6db6c000d 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -18,7 +18,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) + padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ + "frame": f"%0{padding}d", "family": "render", "representation": self.output_extension, "ext": self.output_extension From f8b00d7ad4a66b93886b48c8100f42fdf9b6fb25 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:16:39 +0100 Subject: [PATCH 311/409] celaction: submit job from published workfile --- .../publish/submit_celaction_deadline.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index e39c2c0061..03d59b30fd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -80,6 +80,26 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): render_dir = os.path.normpath(os.path.dirname(render_path)) render_path = os.path.normpath(render_path) script_name = os.path.basename(script_path) + + for item in instance.context: + if "workfile" in item.data["families"]: + msg = "Workfile (scene) must be published along" + assert item.data["publish"] is True, msg + + template_data = item.data.get("anatomyData") + rep = item.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + 
anatomy_filled = instance.context.data["anatomy"].format( + template_data) + template_filled = anatomy_filled["publish"]["path"] + script_path = os.path.normpath(template_filled) + + self.log.info( + "Using published scene for render {}".format(script_path) + ) + jobname = "%s - %s" % (script_name, instance.name) output_filename_0 = self.preview_fname(render_path) @@ -96,7 +116,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): chunk_size = self.deadline_chunk_size # search for %02d pattern in name, and padding number - search_results = re.search(r"(.%0)(\d)(d)[._]", render_path).groups() + search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups() split_patern = "".join(search_results) padding_number = int(search_results[1]) From 2117df1ad2a314e600178dec6c10d977cdfdc6e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:38:14 +0100 Subject: [PATCH 312/409] move deadline plugin to correct place --- .../custom/plugins}/CelAction/CelAction.ico | Bin .../custom/plugins}/CelAction/CelAction.param | 0 .../custom/plugins}/CelAction/CelAction.py | 1 + setup.cfg | 2 +- 4 files changed, 2 insertions(+), 1 deletion(-) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.ico (100%) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.param (100%) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.py (98%) diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico similarity index 100% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param similarity index 100% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py similarity index 98% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py index d19adc4fb9..2d0edd3dca 100644 --- a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py @@ -71,6 +71,7 @@ class CelActionPlugin(DeadlinePlugin): # Ignore 'celaction' Pop-up dialog self.AddPopupIgnorer(".*Rendering.*") + self.AddPopupIgnorer(".*AutoRender.*") # Ignore 'celaction' Pop-up dialog self.AddPopupIgnorer(".*Wait.*") diff --git a/setup.cfg b/setup.cfg index a721b8e9f6..10cca3eb3f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,7 +9,7 @@ exclude = */vendor, website, openpype/vendor, - */deadline_custom_plugin/CelAction + *deadline/repository/custom/plugins max-complexity = 30 From 891a7e7609b9bd296af0923f3c508f9b8a203689 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:47:22 
+0100 Subject: [PATCH 313/409] celaction: restructuring cli.py and kwargs --- openpype/hosts/celaction/api/__init__.py | 1 - openpype/hosts/celaction/hooks/pre_celaction_setup.py | 11 +++++------ .../plugins/publish/collect_celaction_cli_kwargs.py | 10 +++++----- .../plugins/publish/collect_celaction_instances.py | 3 ++- openpype/hosts/celaction/scripts/__init__.py | 1 + .../celaction/{api/cli.py => scripts/publish_cli.py} | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) delete mode 100644 openpype/hosts/celaction/api/__init__.py create mode 100644 openpype/hosts/celaction/scripts/__init__.py rename openpype/hosts/celaction/{api/cli.py => scripts/publish_cli.py} (93%) diff --git a/openpype/hosts/celaction/api/__init__.py b/openpype/hosts/celaction/api/__init__.py deleted file mode 100644 index 8c93d93738..0000000000 --- a/openpype/hosts/celaction/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -kwargs = None diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index cde3a0c723..21ff38b701 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,12 +1,11 @@ import os import shutil -import subprocess import winreg from openpype.lib import PreLaunchHook, get_openpype_execute_args -from openpype.hosts.celaction import api as caapi +from openpype.hosts.celaction import scripts -CELACTION_API_DIR = os.path.dirname( - os.path.abspath(caapi.__file__) +CELACTION_SCRIPTS_DIR = os.path.dirname( + os.path.abspath(scripts.__file__) ) @@ -38,7 +37,7 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) - path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") + path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) openpype_executables = subproces_args.pop(0) @@ -108,7 +107,7 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - openpype_celaction_dir = os.path.dirname(CELACTION_API_DIR) + openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR) template_path = os.path.join( openpype_celaction_dir, "resources", diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 15c5ddaf1c..b82c0f5648 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.hosts.celaction import api as celaction +from openpype.hosts.celaction import scripts class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -9,15 +9,15 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - kwargs = celaction.kwargs.copy() + passing_kwargs = scripts.PASSING_KWARGS.copy() self.log.info("Storing kwargs: %s" % kwargs) - context.set_data("kwargs", kwargs) + context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values - for k, v in kwargs.items(): + for k, v in passing_kwargs.items(): self.log.info(f"Setting `{k}` to instance.data with value: `{v}`") if k in ["frameStart", "frameEnd"]: - context.data[k] = kwargs[k] = int(v) + context.data[k] = passing_kwargs[k] = int(v) else: context.data[k] = v diff --git 
a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
index b5f99a1416..35ac7fc264 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
@@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
             "version": version
         }
-        celaction_kwargs = context.data.get("kwargs", {})
+        celaction_kwargs = context.data.get(
+            "passingKwargs", {})
         if celaction_kwargs:
             shared_instance_data.update(celaction_kwargs)
diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py
new file mode 100644
index 0000000000..dfd9b37ae2
--- /dev/null
+++ b/openpype/hosts/celaction/scripts/__init__.py
@@ -0,0 +1 @@
+PASSING_KWARGS = None
diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/scripts/publish_cli.py
similarity index 93%
rename from openpype/hosts/celaction/api/cli.py
rename to openpype/hosts/celaction/scripts/publish_cli.py
index 4c07374b08..586880dc4c 100644
--- a/openpype/hosts/celaction/api/cli.py
+++ b/openpype/hosts/celaction/scripts/publish_cli.py
@@ -7,7 +7,7 @@ import pyblish.util
 import openpype.hosts.celaction
 from openpype.lib import Logger
-from openpype.hosts.celaction import api as celaction
+from openpype.hosts.celaction import scripts
 from openpype.tools.utils import host_tools
 from openpype.pipeline import install_openpype_plugins
@@ -41,7 +41,7 @@ def cli():
     parser.add_argument("--resolutionHeight",
                         help=("Height of resolution"))
-    celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__
+    scripts.PASSING_KWARGS = parser.parse_args(sys.argv[1:]).__dict__
 def main():

From 91580fdb301dd9907660a6a2b145b775f961ac97 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 25 Nov 2022 16:51:44 +0100
Subject: [PATCH 314/409] celaction: hook should define data from asset_doc rather than from project doc

---
 openpype/hosts/celaction/hooks/pre_celaction_setup.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
index 21ff38b701..a8fc7f322e 100644
--- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py
+++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
@@ -18,9 +18,9 @@ class CelactionPrelaunchHook(PreLaunchHook):
     platforms = ["windows"]

     def execute(self):
-        project_doc = self.data["project_doc"]
-        width = project_doc["data"]["resolutionWidth"]
-        height = project_doc["data"]["resolutionHeight"]
+        asset_doc = self.data["asset_doc"]
+        width = asset_doc["data"]["resolutionWidth"]
+        height = asset_doc["data"]["resolutionHeight"]

         # Add workfile path to launch arguments
         workfile_path = self.workfile_path()

From a4e6f67692e46208bb80465987359aeebc610ca3 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Fri, 25 Nov 2022 16:56:19 +0100
Subject: [PATCH 315/409] celaction: fix kwargs print

---
 .../plugins/publish/collect_celaction_cli_kwargs.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
index b82c0f5648..e552e9ba6a 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
@@ -1,5 +1,6 @@
 import pyblish.api
 from
openpype.hosts.celaction import scripts +from pprint import pformat class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -11,7 +12,10 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): def process(self, context): passing_kwargs = scripts.PASSING_KWARGS.copy() - self.log.info("Storing kwargs: %s" % kwargs) + self.log.info("Storing kwargs ...") + self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) + + # set kwargs to context data context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values From 30550c26b01bbe502abaf74562408bd0f1308475 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:04:19 +0100 Subject: [PATCH 316/409] celaction: render from published workfile fix --- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 03d59b30fd..3be864781f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -82,7 +82,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): script_name = os.path.basename(script_path) for item in instance.context: - if "workfile" in item.data["families"]: + if "workfile" in item.data["family"]: msg = "Workfile (scene) must be published along" assert item.data["publish"] is True, msg From 0c475d5b9d1edaad6ce1c47f16e87f297c3443c4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:40:23 +0100 Subject: [PATCH 317/409] celaction: deadline job delay settings --- .../publish/submit_celaction_deadline.py | 78 ++++++++++++++----- .../defaults/project_settings/deadline.json | 3 +- .../schema_project_deadline.json | 5 ++ 3 files changed, 64 insertions(+), 22 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 3be864781f..f716621d59 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -5,7 +5,7 @@ import getpass import requests import pyblish.api - +from openpype.pipeline import legacy_io class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -25,12 +25,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): deadline_pool_secondary = "" deadline_group = "" deadline_chunk_size = 1 - - enviro_filter = [ - "FTRACK_API_USER", - "FTRACK_API_KEY", - "FTRACK_SERVER" - ] + deadline_job_delay = "00:00:08:00" def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" @@ -163,10 +158,11 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): # frames from Deadline Monitor "OutputFilename0": output_filename_0.replace("\\", "/"), - # # Asset dependency to wait for at least the scene file to sync. + # # Asset dependency to wait for at least + # the scene file to sync. 
# "AssetDependency0": script_path "ScheduledType": "Once", - "JobDelay": "00:00:08:00" + "JobDelay": self.deadline_job_delay }, "PluginInfo": { # Input @@ -191,18 +187,58 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - i = 0 - for key, values in dict(os.environ).items(): - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % i: "{key}={value}".format( - key=key, value=values - ) - } - ) - i += 1 + # Include critical environment variables with submission + keys = [ + "PYTHONPATH", + "PATH", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "PYBLISHPLUGINPATH", + "NUKE_PATH", + "TOOL_ENV", + "FOUNDRY_LICENSE", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + # add allowed keys from preset if any + if self.env_allowed_keys: + keys += self.env_allowed_keys + + environment = dict({ + key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session + ) + + for _path in os.environ: + if _path.lower().startswith('openpype_'): + environment[_path] = os.environ[_path] + + # to recognize job from OPENPYPE for turning Event On/Off + environment.update({ + "OPENPYPE_LOG_NO_COLORS": "1", + "OPENPYPE_RENDER_JOB": "1" + }) + + # finally search replace in values of any key + if self.env_search_replace_values: + for key, value in environment.items(): + for _k, _v in self.env_search_replace_values.items(): + environment[key] = value.replace(_k, _v) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 8e892bb67e..6e1c0f3540 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -77,7 +77,8 @@ "deadline_pool": "", "deadline_pool_secondary": "", "deadline_group": "", - "deadline_chunk_size": 10 + "deadline_chunk_size": 10, + "deadline_job_delay": "00:00:00:00" }, "ProcessSubmittedJobOnFarm": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 77d520c54a..5295b0e9d6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -429,6 +429,11 @@ "type": "number", "key": "deadline_chunk_size", "label": "Deadline Chunk size" + }, + { + "type": "text", + "key": "deadline_job_delay", + "label": "Delay job (timecode dd:hh:mm:ss)" } ] }, From eb1b6e037b09c20bca12ab335ec675aeae0b311e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:42:22 +0100 Subject: [PATCH 318/409] label readability --- .../schemas/projects_schema/schema_project_deadline.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json 
b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 5295b0e9d6..69f81ed682 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -392,7 +392,7 @@ "collapsible": true, "checkbox_key": "enabled", "key": "CelactionSubmitDeadline", - "label": "CelactionSubmitDeadline", + "label": "Celaction Submit Deadline", "is_group": true, "children": [ { From 9f5e892a678307dec9b3d577a0ae1364a680af06 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:49:09 +0100 Subject: [PATCH 319/409] removing redundant code --- .../plugins/publish/submit_celaction_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index f716621d59..e5b06b007c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -208,10 +208,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): if instance.context.data.get("deadlinePassMongoUrl"): keys.append("OPENPYPE_MONGO") - # add allowed keys from preset if any - if self.env_allowed_keys: - keys += self.env_allowed_keys - environment = dict({ key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session @@ -227,12 +223,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): "OPENPYPE_RENDER_JOB": "1" }) - # finally search replace in values of any key - if self.env_search_replace_values: - for key, value in environment.items(): - for _k, _v in self.env_search_replace_values.items(): - environment[key] = value.replace(_k, _v) - payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( key=key, From 3491ef73ac54a98cab2e759c81a211a44549ea37 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:25:41 +0100 Subject: [PATCH 320/409] remove nuke code --- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index e5b06b007c..0583e146a7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -199,9 +199,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): "FTRACK_API_USER", "FTRACK_SERVER", "PYBLISHPLUGINPATH", - "NUKE_PATH", "TOOL_ENV", - "FOUNDRY_LICENSE", "OPENPYPE_VERSION" ] # Add mongo url if it's enabled From f162ec56d76b1fe4a6b4267ea1fdfe3ca6ee6927 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:58:22 +0100 Subject: [PATCH 321/409] celaction: removing environment from job --- .../publish/submit_celaction_deadline.py | 41 ------------------- 1 file changed, 41 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 0583e146a7..9a7d74c1f7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -187,47 +187,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] 
self.log.info("using render plugin : {}".format(plugin)) - # Include critical environment variables with submission - keys = [ - "PYTHONPATH", - "PATH", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "PYBLISHPLUGINPATH", - "TOOL_ENV", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({ - key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session - ) - - for _path in os.environ: - if _path.lower().startswith('openpype_'): - environment[_path] = os.environ[_path] - - # to recognize job from OPENPYPE for turning Event On/Off - environment.update({ - "OPENPYPE_LOG_NO_COLORS": "1", - "OPENPYPE_RENDER_JOB": "1" - }) - - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) From 193112c18bcb17c829ea4afb6813823bf10d888e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:58:53 +0100 Subject: [PATCH 322/409] deadline: adding openpype version to global job --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 6362b4ca65..249211e965 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -241,6 +241,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") + environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] environment["OPENPYPE_PUBLISH_JOB"] = "1" From b74675ebb0fc360eaa9af4b8faa57d0bcd5e0541 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 26 Nov 2022 03:31:25 +0000 Subject: [PATCH 323/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index ffabcf8025..bf9f97d5f4 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7" +__version__ = "3.14.8-nightly.1" From 74ab26863c6063eaaaac17d3f005f2c692debf2c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 15:56:35 +0800 Subject: [PATCH 324/409] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 88 +++++++++++++++++++ .../maya/plugins/publish/collect_gltf.py | 18 ++++ .../maya/plugins/publish/extract_gltf.py | 65 ++++++++++++++ .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_legacy.py | 1 + .../defaults/project_settings/maya.json | 4 + .../schemas/schema_maya_publish.json | 21 ++++- 8 files changed, 198 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/maya/api/gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_gltf.py create mode 100644 
openpype/hosts/maya/plugins/publish/extract_gltf.py diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py new file mode 100644 index 0000000000..dd2a95a6d9 --- /dev/null +++ b/openpype/hosts/maya/api/gltf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +"""Tools to work with GLTF.""" +import logging + +from pyblish.api import Instance + +from maya import cmds, mel # noqa + +log = logging.getLogger(__name__) + +_gltf_options = { + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials + "eut":bool, # excludeUnusedTexcoord + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly +} + +def extract_gltf(parent_dir, + filename, + **kwargs): + + """Sets GLTF export options from data in the instance. + + """ + + cmds.loadPlugin('maya2glTF', quiet=True) + # load the UI to run mel command + mel.eval("maya2glTF_UI()") + + parent_dir = parent_dir.replace('\\', '/') + options = { + "dsa": 1, + "glb": True + } + options.update(kwargs) + + for key, value in options.copy().items(): + if key not in _gltf_options: + log.warning("extract_gltf() does not support option '%s'. 
" + "Flag will be ignored..", key) + options.pop(key) + continue + + job_args = list() + default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename) # noqa + job_args.append(default_opt) + + for key, value in options.items(): + if isinstance(value, str): + job_args.append("-{0} \"{1}\"".format(key, value)) + elif isinstance(value, bool): + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + log.info("{}".format(job_str)) + mel.eval(job_str) + + # close the gltf export after finish the export + gltf_UI = "maya2glTF_exporter_window" + if cmds.window(gltf_UI, q = True, exists =True): + cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py new file mode 100644 index 0000000000..dba06dca23 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectGLTF(pyblish.api.InstancePlugin): + """Collect Assets for GLTF/GLB export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Asset for GLTF/GLB export" + families = ["model", "animation", "pointcache"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "fbx" not in instance.data["families"]: + instance.data["families"].append("gltf") + diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py new file mode 100644 index 0000000000..1cab40d825 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -0,0 +1,65 @@ +import os + +from maya import cmds, mel +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api.gltf import extract_gltf + +class ExtractGLB(publish.Extractor): + + order = pyblish.api.ExtractorOrder + hosts = ["maya"] + label = "Extract GLB" + families = ["gltf"] + + def process(self, instance): + staging_dir = self.staging_dir(instance) + filename = "{0}.glb".format(instance.name) + path = os.path.join(staging_dir, filename) + + + self.log.info("Extracting GLB to: {}".format(path)) + + nodes = instance[:] + + self.log.info("Instance: {0}".format(nodes)) + + start_frame = instance.data('frameStart') or \ + int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = instance.data('frameEnd') or \ + int(cmds.playbackOptions(query=True, + animationEndTime=True)) + fps = mel.eval('currentTimeUnitToFPS()') + + options = { + "sno": True, #selectedNodeOnly + "nbu": True, # .bin instead of .bin0 + "ast": start_frame, + "aet": end_frame, + "afr": fps, + "dsa": 1, + "acn": instance.name, + "glb": True, + "vno": True #visibleNodeOnly + } + with lib.maintained_selection(): + cmds.select(nodes, hi=True, noExpand=True) + extract_gltf(staging_dir, + instance.name, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'glb', + 'ext': 'glb', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract GLB successful to: {0}".format(path)) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 00f65b8b67..70610da909 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ 
b/openpype/plugins/publish/collect_resources_path.py @@ -50,6 +50,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "source", "assembly", "fbx", + "gltf", "textures", "action", "background", diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788..3c78109934 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -111,6 +111,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c..2a3512471c 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -106,6 +106,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..3413dee83b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -250,6 +250,10 @@ "CollectFbxCamera": { "enabled": false }, + "CollectGLTF": { + "enabled": true, + "glb" : true + }, "ValidateInstanceInContext": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index ab8c6b885e..3aca9b2010 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -35,6 +35,25 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectGLTF", + "label": "Collect Assets for GLTF/GLB export", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "glb", + "label": "Export GLB" + } + ] + }, { "type": "splitter" }, @@ -62,7 +81,7 @@ } ] }, - { + { "type": "dict", "collapsible": true, "key": "ValidateFrameRange", From 13002a39491410e27e84fd02a9f46cc19510ed52 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 16:14:29 +0800 Subject: [PATCH 325/409] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 62 +++++++++---------- .../maya/plugins/publish/collect_gltf.py | 1 - .../maya/plugins/publish/extract_gltf.py | 10 +-- .../defaults/project_settings/maya.json | 2 +- 4 files changed, 37 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py index dd2a95a6d9..2519bd1220 100644 --- a/openpype/hosts/maya/api/gltf.py +++ b/openpype/hosts/maya/api/gltf.py @@ -2,43 +2,42 @@ """Tools to work with GLTF.""" import logging -from pyblish.api import Instance - from maya import cmds, mel # noqa log = logging.getLogger(__name__) _gltf_options = { - "of": str, # outputFolder - "cpr": str, # copyright - "sno": bool, # selectedNodeOnly - "sn": str, # sceneName - "glb": bool, # binary - "nbu": bool, # niceBufferURIs - "hbu": bool, # hashBufferURI - "ext": bool, # externalTextures - "ivt": int, # initialValuesTime - "acn": str, # animationClipName - "ast": int, # animationClipStartTime - "aet": int, # animationClipEndTime - "afr": float, # animationClipFrameRate - "dsa": int, # detectStepAnimations - 
"mpa": str, # meshPrimitiveAttributes - "bpa": str, # blendPrimitiveAttributes - "i32": bool, # force32bitIndices - "ssm": bool, # skipStandardMaterials - "eut":bool, # excludeUnusedTexcoord - "dm": bool, # defaultMaterial - "cm": bool, # colorizeMaterials - "dmy": str, # dumpMaya - "dgl": str, # dumpGLTF - "imd": str, # ignoreMeshDeformers - "ssc": bool, # skipSkinClusters - "sbs": bool, # skipBlendShapes - "rvp": bool, # redrawViewport - "vno": bool # visibleNodesOnly + "of" : str, # outputFolder + "cpr" : str, # copyright + "sno" : bool, # selectedNodeOnly + "sn" : str, # sceneName + "glb" : bool, # binary + "nbu" : bool, # niceBufferURIs + "hbu" : bool, # hashBufferURI + "ext" : bool, # externalTextures + "ivt" : int, # initialValuesTime + "acn" : str, # animationClipName + "ast" : int, # animationClipStartTime + "aet" : int, # animationClipEndTime + "afr" : float, # animationClipFrameRate + "dsa" : int, # detectStepAnimations + "mpa" : str, # meshPrimitiveAttributes + "bpa" : str, # blendPrimitiveAttributes + "i32" : bool, # force32bitIndices + "ssm" : bool, # skipStandardMaterials + "eut": bool, # excludeUnusedTexcoord + "dm" : bool, # defaultMaterial + "cm" : bool, # colorizeMaterials + "dmy" : str, # dumpMaya + "dgl" : str, # dumpGLTF + "imd" : str, # ignoreMeshDeformers + "ssc" : bool, # skipSkinClusters + "sbs" : bool, # skipBlendShapes + "rvp" : bool, # redrawViewport + "vno" : bool # visibleNodesOnly } + def extract_gltf(parent_dir, filename, **kwargs): @@ -63,6 +62,7 @@ def extract_gltf(parent_dir, log.warning("extract_gltf() does not support option '%s'. " "Flag will be ignored..", key) options.pop(key) + options.pop(value) continue job_args = list() @@ -84,5 +84,5 @@ def extract_gltf(parent_dir, # close the gltf export after finish the export gltf_UI = "maya2glTF_exporter_window" - if cmds.window(gltf_UI, q = True, exists =True): + if cmds.window(gltf_UI, q=True, exists=True): cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py index dba06dca23..bbc4e31f92 100644 --- a/openpype/hosts/maya/plugins/publish/collect_gltf.py +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -15,4 +15,3 @@ class CollectGLTF(pyblish.api.InstancePlugin): if "fbx" not in instance.data["families"]: instance.data["families"].append("gltf") - diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py index 1cab40d825..f5ceed5f33 100644 --- a/openpype/hosts/maya/plugins/publish/extract_gltf.py +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -7,6 +7,7 @@ from openpype.pipeline import publish from openpype.hosts.maya.api import lib from openpype.hosts.maya.api.gltf import extract_gltf + class ExtractGLB(publish.Extractor): order = pyblish.api.ExtractorOrder @@ -19,7 +20,6 @@ class ExtractGLB(publish.Extractor): filename = "{0}.glb".format(instance.name) path = os.path.join(staging_dir, filename) - self.log.info("Extracting GLB to: {}".format(path)) nodes = instance[:] @@ -28,14 +28,14 @@ class ExtractGLB(publish.Extractor): start_frame = instance.data('frameStart') or \ int(cmds.playbackOptions(query=True, - animationStartTime=True)) + animationStartTime=True))# noqa end_frame = instance.data('frameEnd') or \ int(cmds.playbackOptions(query=True, - animationEndTime=True)) + animationEndTime=True)) # noqa fps = mel.eval('currentTimeUnitToFPS()') options = { - "sno": True, #selectedNodeOnly + "sno": True, # selectedNodeOnly "nbu": True, # 
.bin instead of .bin0 "ast": start_frame, "aet": end_frame, @@ -43,7 +43,7 @@ class ExtractGLB(publish.Extractor): "dsa": 1, "acn": instance.name, "glb": True, - "vno": True #visibleNodeOnly + "vno": True # visibleNodeOnly } with lib.maintained_selection(): cmds.select(nodes, hi=True, noExpand=True) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 3413dee83b..e73f73161e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -251,7 +251,7 @@ "enabled": false }, "CollectGLTF": { - "enabled": true, + "enabled": false, "glb" : true }, "ValidateInstanceInContext": { From 6dadff2b58ebbc14c1e3622aa866fca5abe31490 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 16:17:09 +0800 Subject: [PATCH 326/409] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 54 ++++++++++++++++----------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py index 2519bd1220..2a983f1573 100644 --- a/openpype/hosts/maya/api/gltf.py +++ b/openpype/hosts/maya/api/gltf.py @@ -7,34 +7,34 @@ from maya import cmds, mel # noqa log = logging.getLogger(__name__) _gltf_options = { - "of" : str, # outputFolder - "cpr" : str, # copyright - "sno" : bool, # selectedNodeOnly - "sn" : str, # sceneName - "glb" : bool, # binary - "nbu" : bool, # niceBufferURIs - "hbu" : bool, # hashBufferURI - "ext" : bool, # externalTextures - "ivt" : int, # initialValuesTime - "acn" : str, # animationClipName - "ast" : int, # animationClipStartTime - "aet" : int, # animationClipEndTime - "afr" : float, # animationClipFrameRate - "dsa" : int, # detectStepAnimations - "mpa" : str, # meshPrimitiveAttributes - "bpa" : str, # blendPrimitiveAttributes - "i32" : bool, # force32bitIndices - "ssm" : bool, # skipStandardMaterials + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials "eut": bool, # excludeUnusedTexcoord - "dm" : bool, # defaultMaterial - "cm" : bool, # colorizeMaterials - "dmy" : str, # dumpMaya - "dgl" : str, # dumpGLTF - "imd" : str, # ignoreMeshDeformers - "ssc" : bool, # skipSkinClusters - "sbs" : bool, # skipBlendShapes - "rvp" : bool, # redrawViewport - "vno" : bool # visibleNodesOnly + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly } From 8eae3d395672f599f1fe8dea554ed1d83e1f50e3 Mon Sep 17 00:00:00 2001 From: Joseff Date: Sat, 26 Nov 2022 14:37:15 +0100 Subject: [PATCH 327/409] Update The #include for NotificationManager MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Renamed the include to "Framework/Notifications/NotificationManager.h" Co-authored-by: Ondřej 
Samohel <33513211+antirotor@users.noreply.github.com> --- .../UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index c2c7e249c3..322663eeec 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -3,7 +3,7 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" #include "AssetToolsModule.h" -#include "NotificationManager.h" +#include "Framework/Notifications/NotificationManager.h" #include "SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking From c2e9bdf161b7b073a2133efb2ce9d04e2360540d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 26 Nov 2022 21:35:40 +0100 Subject: [PATCH 328/409] celaction: DL improving code --- .../publish/submit_celaction_deadline.py | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 9a7d74c1f7..7913851d8a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -2,10 +2,9 @@ import os import re import json import getpass - import requests import pyblish.api -from openpype.pipeline import legacy_io + class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -194,10 +193,15 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): self.expected_files(instance, render_path) self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) + response = requests.post(self.deadline_url, json=payload) if not response.ok: - raise Exception(response.text) + self.log.error("Submission failed!") + self.log.error(response.status_code) + self.log.error(response.content) + self.log.debug(payload) + raise SystemExit(response.text) return response @@ -235,32 +239,29 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): split_path = path.split(split_patern) hashes = "#" * int(search_results[1]) return "".join([split_path[0], hashes, split_path[-1]]) - if "#" in path: - self.log.debug("_ path: `{}`".format(path)) - return path - else: - return path - def expected_files(self, - instance, - path): + self.log.debug("_ path: `{}`".format(path)) + return path + + def expected_files(self, instance, filepath): """ Create expected files in instance data """ if not instance.data.get("expectedFiles"): - instance.data["expectedFiles"] = list() + instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) - file = os.path.basename(path) + dirpath = os.path.dirname(filepath) + filename = os.path.basename(filepath) - if "#" in file: - pparts = file.split("#") + if "#" in filename: + pparts = filename.split("#") padding = "%0{}d".format(len(pparts) - 1) - file = pparts[0] + padding + pparts[-1] + filename = pparts[0] + padding + pparts[-1] - if "%" not in file: - instance.data["expectedFiles"].append(path) + if "%" not in filename: + instance.data["expectedFiles"].append(filepath) return for i in range(self._frame_start, (self._frame_end + 1)): 
instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirpath, (filename % i)).replace("\\", "/") + ) From 1f6be563eb6b28ef0988d55e6a143fe0e98d6aa1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 26 Nov 2022 21:36:03 +0100 Subject: [PATCH 329/409] celaction: improving code PR feedback --- .../celaction/hooks/pre_celaction_setup.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index a8fc7f322e..62cebf99ed 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,6 +1,7 @@ import os import shutil import winreg +import subprocess from openpype.lib import PreLaunchHook, get_openpype_execute_args from openpype.hosts.celaction import scripts @@ -13,7 +14,6 @@ class CelactionPrelaunchHook(PreLaunchHook): """ Bootstrap celacion with pype """ - workfile_ext = "scn" app_groups = ["celaction"] platforms = ["windows"] @@ -39,28 +39,28 @@ class CelactionPrelaunchHook(PreLaunchHook): path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) - openpype_executables = subproces_args.pop(0) + openpype_executable = subproces_args.pop(0) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - openpype_executables + openpype_executable ) parameters = subproces_args + [ - "--currentFile *SCENE*", - "--chunk *CHUNK*", - "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*" + "--currentFile", "*SCENE*", + "--chunk", "*CHUNK*", + "--frameStart", "*START*", + "--frameEnd", "*END*", + "--resolutionWidth", "*X*", + "--resolutionHeight", "*Y*" ] winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters) + subprocess.list2cmdline(parameters) ) # setting resolution parameters From a2abcd252471c3cce2f56e0441043693df12bc0f Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 09:32:14 +0000 Subject: [PATCH 330/409] Optional viewport refresh on pointcache extraction --- .../hosts/maya/plugins/create/create_pointcache.py | 1 + .../hosts/maya/plugins/publish/extract_pointcache.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index ab8fe12079..cdec140ea8 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -28,6 +28,7 @@ class CreatePointCache(plugin.Creator): self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups self.data["worldSpace"] = True # Default to exporting world-space + self.data["refresh"] = False # Default to suspend refresh. 
# Add options for custom attributes self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 7c1c6d5c12..5f5532e60a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,13 +86,21 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - with suspended_refresh(): + if instance.data.get("refresh", False): with maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, **options) + else: + with suspended_refresh(): + with maintained_selection(): + cmds.select(nodes, noExpand=True) + extract_alembic(file=path, + startFrame=start, + endFrame=end, + **options) if "representations" not in instance.data: instance.data["representations"] = [] From 9e2f3ab8685dea532a4b452da0ec6e0ef5ee56da Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 09:56:22 +0000 Subject: [PATCH 331/409] Disable viewport Pan/Zoom on playblast extraction. --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index b19d24fad7..04e3c7bccf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -115,6 +115,10 @@ class ExtractPlayblast(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -135,6 +139,8 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) From 34d2f1252601a3fed9285a0b72b2ad759e463623 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:04:19 +0000 Subject: [PATCH 332/409] Hound --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 04e3c7bccf..1f9f9db99a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -118,7 +118,7 @@ class ExtractPlayblast(publish.Extractor): # Disable Pan/Zoom. 
pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) - + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -140,7 +140,7 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) - + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) From 49ad1dbc97a6a1ea79f8ff273c5f275d4cd84282 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:12:02 +0000 Subject: [PATCH 333/409] Include thumbnail extraction --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2be..06244cf003 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -117,6 +117,10 @@ class ExtractThumbnail(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): # Force viewer to False in call to capture because we have our own # viewer opening call to allow a signal to trigger between @@ -136,6 +140,7 @@ class ExtractThumbnail(publish.Extractor): _, thumbnail = os.path.split(playblast) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) self.log.info("file list {}".format(thumbnail)) From 9aeb7898527d9d2892394402462251a1b89dfe87 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Tue, 29 Nov 2022 13:04:29 +0700 Subject: [PATCH 334/409] bugfix: Use unused 'paths' list --- openpype/tools/standalonepublish/widgets/widget_drop_frame.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index f8a8273b26..18c2b27678 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -178,7 +178,7 @@ class DropDataFrame(QtWidgets.QFrame): paths = self._get_all_paths(in_paths) collectionable_paths = [] non_collectionable_paths = [] - for path in in_paths: + for path in paths: ext = os.path.splitext(path)[1] if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) From 37535f35bdc5792e5ac3b0c5acef52cc8ad8c5dd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 29 Nov 2022 21:56:34 +0800 Subject: [PATCH 335/409] gltf extractor for Maya --- openpype/settings/defaults/project_settings/maya.json | 3 +-- .../schemas/projects_schema/schemas/schema_maya_publish.json | 5 ----- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index e73f73161e..59e71b2e29 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -251,8 +251,7 @@ "enabled": false }, "CollectGLTF": { - "enabled": false, - "glb" : true + "enabled": false }, "ValidateInstanceInContext": { "enabled": true, diff --git 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 3aca9b2010..e7a56d0749 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -46,11 +46,6 @@ "type": "boolean", "key": "enabled", "label": "Enabled" - }, - { - "type": "boolean", - "key": "glb", - "label": "Export GLB" } ] }, From decc8df4aef6eb1aef8e55152c3eea0760d1fad2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 29 Nov 2022 17:29:58 +0100 Subject: [PATCH 336/409] :construction: 3dsmax addon basics --- openpype/hosts/3dsmax/api/__init__.py | 0 openpype/hosts/3dsmax/plugins/__init__.py | 0 openpype/hosts/3dsmax/startup/startup.py | 2 - openpype/hosts/max/__init__.py | 10 ++ openpype/hosts/max/addon.py | 16 ++ openpype/hosts/max/api/__init__.py | 13 ++ openpype/hosts/max/api/lib.py | 2 + openpype/hosts/max/api/menu.py | 80 +++++++++ openpype/hosts/max/api/pipeline.py | 153 ++++++++++++++++++ openpype/hosts/max/hooks/set_paths.py | 17 ++ .../hosts/{3dsmax => max/plugins}/__init__.py | 0 .../hosts/{3dsmax => max}/startup/startup.ms | 0 openpype/hosts/max/startup/startup.py | 7 + openpype/settings/entities/enum_entity.py | 2 +- 14 files changed, 299 insertions(+), 3 deletions(-) delete mode 100644 openpype/hosts/3dsmax/api/__init__.py delete mode 100644 openpype/hosts/3dsmax/plugins/__init__.py delete mode 100644 openpype/hosts/3dsmax/startup/startup.py create mode 100644 openpype/hosts/max/__init__.py create mode 100644 openpype/hosts/max/addon.py create mode 100644 openpype/hosts/max/api/__init__.py create mode 100644 openpype/hosts/max/api/lib.py create mode 100644 openpype/hosts/max/api/menu.py create mode 100644 openpype/hosts/max/api/pipeline.py create mode 100644 openpype/hosts/max/hooks/set_paths.py rename openpype/hosts/{3dsmax => max/plugins}/__init__.py (100%) rename openpype/hosts/{3dsmax => max}/startup/startup.ms (100%) create mode 100644 openpype/hosts/max/startup/startup.py diff --git a/openpype/hosts/3dsmax/api/__init__.py b/openpype/hosts/3dsmax/api/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/3dsmax/plugins/__init__.py b/openpype/hosts/3dsmax/plugins/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/3dsmax/startup/startup.py b/openpype/hosts/3dsmax/startup/startup.py deleted file mode 100644 index dd8c08a6b9..0000000000 --- a/openpype/hosts/3dsmax/startup/startup.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -print("inside python startup") \ No newline at end of file diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py new file mode 100644 index 0000000000..8da0e0ee42 --- /dev/null +++ b/openpype/hosts/max/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + MaxAddon, + MAX_HOST_DIR, +) + + +__all__ = ( + "MaxAddon", + "MAX_HOST_DIR", +) \ No newline at end of file diff --git a/openpype/hosts/max/addon.py b/openpype/hosts/max/addon.py new file mode 100644 index 0000000000..734b87dd21 --- /dev/null +++ b/openpype/hosts/max/addon.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +import os +from openpype.modules import OpenPypeModule, IHostAddon + +MAX_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class MaxAddon(OpenPypeModule, IHostAddon): + name = "max" + host_name = "max" + + def initialize(self, module_settings): + 
self.enabled = True + + def get_workfile_extensions(self): + return [".max"] diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py new file mode 100644 index 0000000000..b6998df862 --- /dev/null +++ b/openpype/hosts/max/api/__init__.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +"""Public API for 3dsmax""" + +from .pipeline import ( + MaxHost +) +from .menu import OpenPypeMenu + + +__all__ = [ + "MaxHost", + "OpenPypeMenu" +] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py new file mode 100644 index 0000000000..e50de85f68 --- /dev/null +++ b/openpype/hosts/max/api/lib.py @@ -0,0 +1,2 @@ +def imprint(attr, data): + ... diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py new file mode 100644 index 0000000000..13ca503b4d --- /dev/null +++ b/openpype/hosts/max/api/menu.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +"""3dsmax menu definition of OpenPype.""" +from abc import ABCMeta, abstractmethod +import six +from Qt import QtWidgets, QtCore +from pymxs import runtime as rt + +from openpype.tools.utils import host_tools + + +@six.add_metaclass(ABCMeta) +class OpenPypeMenu(object): + + def __init__(self): + self.main_widget = self.get_main_widget() + + @staticmethod + def get_main_widget(): + """Get 3dsmax main window.""" + return QtWidgets.QWidget.find(rt.windows.getMAXHWND()) + + def get_main_menubar(self): + """Get main Menubar by 3dsmax main window.""" + return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] + + def get_or_create_openpype_menu(self, name="&OpenPype", before="&Help"): + menu_bar = self.get_main_menubar() + menu_items = menu_bar.findChildren( + QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) + help_action = None + for item in menu_items: + if name in item.title(): + # we already have OpenPype menu + return item + + if before in item.title(): + help_action = item.menuAction() + + op_menu = QtWidgets.QMenu("&OpenPype") + menu_bar.insertMenu(before, op_menu) + return op_menu + + def build_openpype_menu(self): + openpype_menu = self.get_or_create_openpype_menu() + load_action = QtWidgets.QAction("Load...", openpype_menu) + load_action.triggered.connect(self.load_callback) + openpype_menu.addAction(load_action) + + publish_action = QtWidgets.QAction("Publish...", openpype_menu) + publish_action.triggered.connect(self.publish_callback) + openpype_menu.addAction(publish_action) + + manage_action = QtWidgets.QAction("Manage...", openpype_menu) + manage_action.triggered.connect(self.manage_callback) + openpype_menu.addAction(manage_action) + + library_action = QtWidgets.QAction("Library...", openpype_menu) + library_action.triggered.connect(self.library_callback) + openpype_menu.addAction(library_action) + + openpype_menu.addSeparator() + + workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) + workfiles_action.triggered.connect(self.workfiles_callback) + openpype_menu.addAction(workfiles_action) + + def load_callback(self): + host_tools.show_loader(parent=self.main_widget) + + def publish_callback(self): + host_tools.show_publisher(parent=self.main_widget) + + def manage_callback(self): + host_tools.show_subset_manager(parent=self.main_widget) + + def library_callback(self): + host_tools.show_library_loader(parent=self.main_widget) + + def workfiles_callback(self): + host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py new file mode 100644 index 0000000000..2ee5989871 --- /dev/null +++ 
b/openpype/hosts/max/api/pipeline.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+"""Pipeline tools for OpenPype 3dsmax integration."""
+import os
+import sys
+import logging
+import contextlib
+
+from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher
+import pyblish.api
+from openpype.pipeline import (
+    register_creator_plugin_path,
+    register_loader_plugin_path,
+    AVALON_CONTAINER_ID,
+)
+from openpype.hosts.max.api import OpenPypeMenu
+from openpype.hosts.max.api import lib
+from openpype.hosts.max import MAX_HOST_DIR
+from openpype.pipeline.load import any_outdated_containers
+from openpype.lib import (
+    register_event_callback,
+    emit_event,
+)
+from pymxs import runtime as rt  # noqa
+
+log = logging.getLogger("openpype.hosts.max")
+
+PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins")
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
+INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
+
+
+class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher):
+    name = "max"
+    menu = None
+
+    def __init__(self):
+        super(MaxHost, self).__init__()
+        self._op_events = {}
+        self._has_been_setup = False
+
+    def install(self):
+        pyblish.api.register_host("max")
+
+        pyblish.api.register_plugin_path(PUBLISH_PATH)
+        register_loader_plugin_path(LOAD_PATH)
+        register_creator_plugin_path(CREATE_PATH)
+        log.info("Building menu ...")
+
+        self.menu = OpenPypeMenu()
+
+        log.info("Installing callbacks ... ")
+        # register_event_callback("init", on_init)
+        self._register_callbacks()
+
+        # register_event_callback("before.save", before_save)
+        # register_event_callback("save", on_save)
+        # register_event_callback("open", on_open)
+        # register_event_callback("new", on_new)
+
+        # pyblish.api.register_callback(
+        #     "instanceToggled", on_pyblish_instance_toggled
+        # )
+
+        self._has_been_setup = True
+
+    def has_unsaved_changes(self):
+        # TODO: how to get it from 3dsmax? 
+        return True
+
+    def get_workfile_extensions(self):
+        return [".max"]
+
+    def save_workfile(self, dst_path=None):
+        rt.saveMaxFile(dst_path)
+        return dst_path
+
+    def open_workfile(self, filepath):
+        rt.checkForSave()
+        rt.loadMaxFile(filepath)
+        return filepath
+
+    def get_current_workfile(self):
+        return os.path.join(rt.maxFilePath, rt.maxFileName)
+
+    def get_containers(self):
+        return ls()
+
+    def _register_callbacks(self):
+        for event in self._op_events.copy().values():
+            if event is None:
+                continue
+
+            try:
+                rt.callbacks.removeScript(id=rt.name(event.name))
+            except RuntimeError as e:
+                log.info(e)
+
+        rt.callbacks.addScript(
+            event.name, event.callback, id=rt.Name('OpenPype'))
+
+    @staticmethod
+    def create_context_node():
+        """Helper for creating context holding node."""
+
+        root_scene = rt.rootScene
+
+        create_attr_script = ("""
+attributes "OpenPypeContext"
+(
+    parameters main rollout:params
+    (
+        context type: #string
+    )
+
+    rollout params "OpenPype Parameters"
+    (
+        editText editTextContext "Context" type: #string
+    )
+)
+    """)
+
+        attr = rt.execute(create_attr_script)
+        rt.custAttributes.add(root_scene, attr)
+
+        return root_scene.OpenPypeContext.context
+
+    def update_context_data(self, data, changes):
+        try:
+            context = rt.rootScene.OpenPypeContext.context
+        except AttributeError:
+            # context node doesn't exist
+            context = self.create_context_node()
+
+        lib.imprint(context, data)
+
+    def get_context_data(self):
+        try:
+            context = rt.rootScene.OpenPypeContext.context
+        except AttributeError:
+            # context node doesn't exist
+            context = self.create_context_node()
+        return lib.read(context)
+
+    def save_file(self, dst_path=None):
+        # Force forward slashes to avoid segfault
+        dst_path = dst_path.replace("\\", "/")
+        rt.saveMaxFile(dst_path)
+
+
+def ls():
+    ...
\ No newline at end of file
diff --git a/openpype/hosts/max/hooks/set_paths.py b/openpype/hosts/max/hooks/set_paths.py
new file mode 100644
index 0000000000..3db5306344
--- /dev/null
+++ b/openpype/hosts/max/hooks/set_paths.py
@@ -0,0 +1,17 @@
+from openpype.lib import PreLaunchHook
+
+
+class SetPath(PreLaunchHook):
+    """Set current dir to workdir.
+
+    Hook `GlobalHostDataHook` must be executed before this hook. 
+ """ + app_groups = ["max"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + self.launch_context.kwargs["cwd"] = workdir diff --git a/openpype/hosts/3dsmax/__init__.py b/openpype/hosts/max/plugins/__init__.py similarity index 100% rename from openpype/hosts/3dsmax/__init__.py rename to openpype/hosts/max/plugins/__init__.py diff --git a/openpype/hosts/3dsmax/startup/startup.ms b/openpype/hosts/max/startup/startup.ms similarity index 100% rename from openpype/hosts/3dsmax/startup/startup.ms rename to openpype/hosts/max/startup/startup.ms diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py new file mode 100644 index 0000000000..afcbd2d132 --- /dev/null +++ b/openpype/hosts/max/startup/startup.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +from openpype.hosts.max.api import MaxHost +from openpype.pipeline import install_host + +host = MaxHost() +install_host(host) + diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index c07350ba07..c0c103ea10 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -152,7 +152,7 @@ class HostsEnumEntity(BaseEnumEntity): schema_types = ["hosts-enum"] all_host_names = [ - "3dsmax", + "max", "aftereffects", "blender", "celaction", From 2e4db127569edce09336d84fcc4835954fbe6ce6 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 30 Nov 2022 03:32:24 +0000 Subject: [PATCH 337/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index bf9f97d5f4..9a34c85bf8 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.1" +__version__ = "3.14.8-nightly.2" From 29b9603aab1b14e54026f23965281a1fda7d53a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:26:42 +0100 Subject: [PATCH 338/409] change start_number if input is sequence instead of adding -ss --- openpype/plugins/publish/extract_review.py | 57 +++++++++++++--------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index f299d1c6e9..af49f7d79b 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -598,9 +598,13 @@ class ExtractReview(pyblish.api.InstancePlugin): if temp_data["input_is_sequence"]: # Set start frame of input sequence (just frame in filename) # - definition of input filepath - ffmpeg_input_args.extend([ - "-start_number", str(temp_data["first_sequence_frame"]) - ]) + # - add handle start if output should be without handles + start_number = temp_data["first_sequence_frame"] + if temp_data["without_handles"] and temp_data["handles_are_set"]: + start_number += temp_data["handle_start"] + ffmpeg_input_args.extend( + ["-start_number", str(start_number)] + ) # TODO add fps mapping `{fps: fraction}` ? 
# - e.g.: { @@ -609,49 +613,54 @@ class ExtractReview(pyblish.api.InstancePlugin): # "23.976": "24000/1001" # } # Add framerate to input when input is sequence - ffmpeg_input_args.append( - "-framerate {}".format(temp_data["fps"]) - ) + ffmpeg_input_args.extend([ + "-framerate", str(temp_data["fps"]) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) # - this is definition of an output - ffmpeg_output_args.append( - "-start_number {}".format(temp_data["output_frame_start"]) - ) + ffmpeg_output_args.extend([ + "-start_number", str(temp_data["output_frame_start"]) + ]) # Change output's duration and start point if should not contain # handles - start_sec = 0 if temp_data["without_handles"] and temp_data["handles_are_set"]: - # Set start time without handles - # - check if handle_start is bigger than 0 to avoid zero division - if temp_data["handle_start"] > 0: + # Add -ss (start offset in seconds) if input is not sequence + if not temp_data["input_is_sequence"]: start_sec = float(temp_data["handle_start"]) / temp_data["fps"] - ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec)) + # Set start time without handles + # - Skip if start sec is 0.0 + if start_sec > 0.0: + ffmpeg_input_args.extend([ + "-ss", "{:0.10f}".format(start_sec) + ]) # Set output duration inn seconds - ffmpeg_output_args.append("-t {:0.10}".format(duration_seconds)) + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: - ffmpeg_output_args.append("-frames:v {}".format(output_frames_len)) + ffmpeg_output_args.extend([ + "-frames:v", str(output_frames_len) + ]) # Add duration of an input sequence if output is video if ( temp_data["input_is_sequence"] and not temp_data["output_is_sequence"] ): - ffmpeg_input_args.append("-to {:0.10f}".format( - duration_seconds + start_sec - )) + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) # Add video/image input path - ffmpeg_input_args.append( - "-i {}".format( - path_to_subprocess_arg(temp_data["full_input_path"]) - ) - ) + ffmpeg_input_args.extend([ + "-i", path_to_subprocess_arg(temp_data["full_input_path"]) + ]) # Add audio arguments if there are any. Skipped when output are images. if not temp_data["output_ext_is_image"] and temp_data["with_audio"]: From f128425155e0144378882cc0b71d5444d82c9f44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 30 Nov 2022 17:27:12 +0100 Subject: [PATCH 339/409] Update openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../deadline/plugins/publish/submit_celaction_deadline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 7913851d8a..ea44a24459 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -197,9 +197,9 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): response = requests.post(self.deadline_url, json=payload) if not response.ok: - self.log.error("Submission failed!") - self.log.error(response.status_code) - self.log.error(response.content) + self.log.error( + "Submission failed! 
[{}] {}".format( + response.status_code, response.content)) self.log.debug(payload) raise SystemExit(response.text) From 7a90f8f084b8f0d242564584b4df296106c997f9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 30 Nov 2022 17:33:47 +0100 Subject: [PATCH 340/409] celaction: shifting argparse to publish plugin --- .../publish/collect_celaction_cli_kwargs.py | 18 +++++++++++-- openpype/hosts/celaction/scripts/__init__.py | 1 - .../hosts/celaction/scripts/publish_cli.py | 27 ------------------- 3 files changed, 16 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index e552e9ba6a..bf97dd744b 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,6 @@ import pyblish.api -from openpype.hosts.celaction import scripts +import argparse +import sys from pprint import pformat @@ -10,7 +11,20 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - passing_kwargs = scripts.PASSING_KWARGS.copy() + parser = argparse.ArgumentParser(prog="celaction") + parser.add_argument("--currentFile", + help="Pass file to Context as `currentFile`") + parser.add_argument("--chunk", + help=("Render chanks on farm")) + parser.add_argument("--frameStart", + help=("Start of frame range")) + parser.add_argument("--frameEnd", + help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__ self.log.info("Storing kwargs ...") self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py index dfd9b37ae2..e69de29bb2 100644 --- a/openpype/hosts/celaction/scripts/__init__.py +++ b/openpype/hosts/celaction/scripts/__init__.py @@ -1 +0,0 @@ -PASSING_KWARGS = None diff --git a/openpype/hosts/celaction/scripts/publish_cli.py b/openpype/hosts/celaction/scripts/publish_cli.py index 586880dc4c..39d3f1a94d 100644 --- a/openpype/hosts/celaction/scripts/publish_cli.py +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -1,13 +1,11 @@ import os import sys -import argparse import pyblish.api import pyblish.util import openpype.hosts.celaction from openpype.lib import Logger -from openpype.hosts.celaction import scripts from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins @@ -20,30 +18,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -def cli(): - parser = argparse.ArgumentParser(prog="celaction_publish") - - parser.add_argument("--currentFile", - help="Pass file to Context as `currentFile`") - - parser.add_argument("--chunk", - help=("Render chanks on farm")) - - parser.add_argument("--frameStart", - help=("Start of frame range")) - - parser.add_argument("--frameEnd", - help=("End of frame range")) - - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - - scripts.PASSING_KWARGS = parser.parse_args(sys.argv[1:]).__dict__ - - def main(): # Registers pype's Global pyblish plugins install_openpype_plugins() @@ -59,6 
+33,5 @@ def main(): if __name__ == "__main__": - cli() result = main() sys.exit(not bool(result)) From 63eae39de5b26d63b4312c7e57ad8bb5ad5767c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:37:14 +0100 Subject: [PATCH 341/409] moved few lines to make it more logical --- openpype/plugins/publish/extract_review.py | 24 +++++++++------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index af49f7d79b..61f162dfcc 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -616,6 +616,11 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpeg_input_args.extend([ "-framerate", str(temp_data["fps"]) ]) + # Add duration of an input sequence if output is video + if not temp_data["output_is_sequence"]: + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) @@ -627,6 +632,11 @@ class ExtractReview(pyblish.api.InstancePlugin): # Change output's duration and start point if should not contain # handles if temp_data["without_handles"] and temp_data["handles_are_set"]: + # Set output duration in seconds + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) + # Add -ss (start offset in seconds) if input is not sequence if not temp_data["input_is_sequence"]: start_sec = float(temp_data["handle_start"]) / temp_data["fps"] @@ -637,26 +647,12 @@ class ExtractReview(pyblish.api.InstancePlugin): "-ss", "{:0.10f}".format(start_sec) ]) - # Set output duration inn seconds - ffmpeg_output_args.extend([ - "-t", "{:0.10}".format(duration_seconds) - ]) - # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: ffmpeg_output_args.extend([ "-frames:v", str(output_frames_len) ]) - # Add duration of an input sequence if output is video - if ( - temp_data["input_is_sequence"] - and not temp_data["output_is_sequence"] - ): - ffmpeg_input_args.extend([ - "-to", "{:0.10f}".format(duration_seconds) - ]) - # Add video/image input path ffmpeg_input_args.extend([ "-i", path_to_subprocess_arg(temp_data["full_input_path"]) From 42588daab5033e5cdd7a0e9c3bbd9d626550c24d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:38:16 +0100 Subject: [PATCH 342/409] unify formatting --- openpype/plugins/publish/extract_review.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 61f162dfcc..9310923a9f 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -602,9 +602,9 @@ class ExtractReview(pyblish.api.InstancePlugin): start_number = temp_data["first_sequence_frame"] if temp_data["without_handles"] and temp_data["handles_are_set"]: start_number += temp_data["handle_start"] - ffmpeg_input_args.extend( - ["-start_number", str(start_number)] - ) + ffmpeg_input_args.extend([ + "-start_number", str(start_number) + ]) # TODO add fps mapping `{fps: fraction}` ? 
# - e.g.: {

From 7544771744427522841e580ac1cee4945b6d07d4 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 1 Dec 2022 13:29:18 +0100
Subject: [PATCH 343/409] replace reset with configure locations

---
 .../modules/ftrack/plugins/publish/integrate_ftrack_api.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
index 231bd8e81e..0e8209866f 100644
--- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
+++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py
@@ -46,8 +46,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             return
 
         session = context.data["ftrackSession"]
-        # Reset session and reconfigure locations
-        session.reset()
+        # Reset session operations and reconfigure locations
+        session.recorded_operations.clear()
+        session._configure_locations()
 
         try:
            self.integrate_to_ftrack(

From 4acbb9fa1823aec8adcec1e25e69c48a49a51979 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 1 Dec 2022 14:19:30 +0100
Subject: [PATCH 344/409] general: integrate skips transferring files when src == dst

---
 openpype/plugins/publish/integrate.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py
index 401270a788..7e4fc84658 100644
--- a/openpype/plugins/publish/integrate.py
+++ b/openpype/plugins/publish/integrate.py
@@ -291,6 +291,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
                 instance)
 
         for src, dst in prepared["transfers"]:
+            if src == dst:
+                continue
+
             # todo: add support for hardlink transfers
             file_transactions.add(src, dst)
 

From 15fa6f6f18b00659351c133b5db0bf342c5a0035 Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 1 Dec 2022 14:53:00 +0100
Subject: [PATCH 345/409] fix occasional double parents issue

---
 openpype/modules/ftrack/lib/avalon_sync.py | 22 ++++++++--------------
 1 file changed, 8 insertions(+), 14 deletions(-)

diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py
index 935d1e85c9..0341c25717 100644
--- a/openpype/modules/ftrack/lib/avalon_sync.py
+++ b/openpype/modules/ftrack/lib/avalon_sync.py
@@ -1556,7 +1556,7 @@ class SyncEntitiesFactory:
             deleted_entities.append(mongo_id)
 
             av_ent = self.avalon_ents_by_id[mongo_id]
-            av_ent_path_items = [p for p in av_ent["data"]["parents"]]
+            av_ent_path_items = list(av_ent["data"]["parents"])
             av_ent_path_items.append(av_ent["name"])
             self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items)))
 
@@ -1855,7 +1855,7 @@ class SyncEntitiesFactory:
             _vis_par = _avalon_ent["data"]["visualParent"]
             _name = _avalon_ent["name"]
             if _name in self.all_ftrack_names:
-                av_ent_path_items = _avalon_ent["data"]["parents"]
+                av_ent_path_items = list(_avalon_ent["data"]["parents"])
                 av_ent_path_items.append(_name)
                 av_ent_path = "/".join(av_ent_path_items)
                 # TODO report
@@ -1997,7 +1997,7 @@ class SyncEntitiesFactory:
                 {"_id": mongo_id},
                 item
             ))
-            av_ent_path_items = item["data"]["parents"]
+            av_ent_path_items = list(item["data"]["parents"])
             av_ent_path_items.append(item["name"])
             av_ent_path = "/".join(av_ent_path_items)
             self.log.debug(
@@ -2110,6 +2110,7 @@ class SyncEntitiesFactory:
 
         entity_dict = self.entities_dict[ftrack_id]
 
+        final_parents = entity_dict["final_entity"]["data"]["parents"]
         if archived_by_id:
             # if is changeable then unarchive (nothing to check here)
             if self.changeability_by_mongo_id[mongo_id]:
@@ -2123,10 +2124,8 @@ class 
SyncEntitiesFactory: archived_name = archived_by_id["name"] if ( - archived_name != entity_dict["name"] or - archived_parents != entity_dict["final_entity"]["data"][ - "parents" - ] + archived_name != entity_dict["name"] + or archived_parents != final_parents ): return None @@ -2136,11 +2135,7 @@ class SyncEntitiesFactory: for archived in archived_by_name: mongo_id = str(archived["_id"]) archived_parents = archived.get("data", {}).get("parents") - if ( - archived_parents == entity_dict["final_entity"]["data"][ - "parents" - ] - ): + if archived_parents == final_parents: return mongo_id # Secondly try to find more close to current ftrack entity @@ -2350,8 +2345,7 @@ class SyncEntitiesFactory: continue changed = True - parents = [par for par in _parents] - hierarchy = "/".join(parents) + parents = list(_parents) self.entities_dict[ftrack_id][ "final_entity"]["data"]["parents"] = parents From b5a5c72d896eae245adcf43e9a09c2e1f031ac44 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 10:03:56 +0000 Subject: [PATCH 346/409] Comments to resolve. --- openpype/hosts/maya/api/mtoa.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py index 6b9b1d6d44..d19fecf6b5 100644 --- a/openpype/hosts/maya/api/mtoa.py +++ b/openpype/hosts/maya/api/mtoa.py @@ -56,7 +56,8 @@ class _AssExtractor(publish.Extractor): return mask def process(self, instance): - + #What is a dry run? + #ass.rr seems like an abstract variable. Needs clarification. dry_run = instance.data.get("ass.rr") staging_dir = self.staging_dir(instance) @@ -92,6 +93,7 @@ class _AssExtractor(publish.Extractor): else: mask = 44 + #get/set should be plugin options. # Generic options if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), False): @@ -108,6 +110,7 @@ class _AssExtractor(publish.Extractor): keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) + #Targets should already be collected targets = self.get_targets(instance) _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) @@ -116,6 +119,8 @@ class _AssExtractor(publish.Extractor): if not dry_run: self.log.debug("Running command: cmds.arnoldExportAss({})" .format(", ".join(_sorted_kwargs))) + #There should be a context for not updating the viewport from + #pointcache extraction. with vp2_paused_context(): with selection(targets): with self.motion_blur_ctx(mb, keys, length): @@ -131,11 +136,14 @@ class _AssExtractor(publish.Extractor): range_ = range(int(start), int(end) + 1) for i in range_: + #padding amount should be configurable. 3 does not seems + #enough as default. fp = "{}.{:03d}.ass".format(export_path.name, i) with open(fp, "w"): pass result.append(fp) + #Whether its a sequence or not, should already have been determined. 
if len(result) == 1: filepath = result[0] else: From 11a2ce396b9afec2036668cd69cd1658efee004c Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 2 Dec 2022 10:28:14 +0000 Subject: [PATCH 347/409] Consistent Python version --- website/docs/dev_requirements.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 1c8958d1c0..fa2d996e20 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -55,7 +55,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python -**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). +**Python 3.7.9** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. From 73393a75b7e33c5dca88dacc4d8f05634da034f3 Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 2 Dec 2022 10:47:06 +0000 Subject: [PATCH 348/409] Note about unrestricted execution on Windows. --- website/docs/dev_build.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 4e80f6e19d..9c99b26f1e 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -51,7 +51,9 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v #### Run from source -For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. +For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. To run the powershell scripts you may have to enable unrestricted execution as administrator: + +`Set-ExecutionPolicy -ExecutionPolicy unrestricted` To start OpenPype from source you need to From a465315f034d9e297a27bdda9a9f37a52b21bc97 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 12:08:59 +0000 Subject: [PATCH 349/409] Add optional keyword to suspend_refresh. 
--- openpype/hosts/maya/api/lib.py | 8 ++++---- .../plugins/publish/extract_pointcache.py | 20 +++++++------------ 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2530021eba..b2bbb823aa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -127,14 +127,14 @@ def get_main_window(): @contextlib.contextmanager -def suspended_refresh(): +def suspended_refresh(suspend=True): """Suspend viewport refreshes""" - + original_state = cmds.refresh(query=True, suspend=True) try: - cmds.refresh(suspend=True) + cmds.refresh(suspend=suspend) yield finally: - cmds.refresh(suspend=False) + cmds.refresh(suspend=original_state) @contextlib.contextmanager diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 5f5532e60a..23b76a48c2 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,21 +86,15 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - if instance.data.get("refresh", False): + with suspended_refresh(suspend=instance.data.get("refresh", False)): with maintained_selection(): cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) - else: - with suspended_refresh(): - with maintained_selection(): - cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) + extract_alembic( + file=path, + startFrame=start, + endFrame=end, + **options + ) if "representations" not in instance.data: instance.data["representations"] = [] From b14a0718d274522fe269a10237e628f6110437af Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 14:15:59 +0100 Subject: [PATCH 350/409] update change log and history for release --- CHANGELOG.md | 22 ++++++++++++++++++++++ HISTORY.md | 20 ++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c5f2cf8b5..3cca692b68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + + ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) diff --git a/HISTORY.md b/HISTORY.md index 04a1073c07..f4e132488b 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,25 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) From 6078a5746feb875c53d6d37274a52097b447bcd0 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:26:03 +0000 Subject: [PATCH 351/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 9a34c85bf8..6903ab4d10 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.2" +__version__ = "3.14.8-nightly.3" From 2195cefe8a76e42b93eab2cf5195e9cb30ca79bd Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:32:51 +0000 Subject: [PATCH 352/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 6903ab4d10..b27b98e2fa 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.3" +__version__ = "3.14.8-nightly.4" From 42984c54667ea1f27229368cbc5fc00e425a5575 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:34:53 +0000 Subject: [PATCH 353/409] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index b27b98e2fa..fc687a1263 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.4" +__version__ = "3.14.8" From 412d03d382aef31769f3d6f61e6ec70fa53fda6d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 15:52:04 +0100 Subject: [PATCH 354/409] Merge fixes for tests from branch with Deadline tests. Branch with newly implemented DL tests is targetted to release 3.15, but changes from it affects tests in develop. 
This branch should solve issues with automatic tests in develop without the need for full release 3.15 changes.
---
 tests/README.md                               | 10 +++
 tests/conftest.py                             | 12 +++
 tests/integration/hosts/aftereffects/lib.py   | 22 ++++--
 .../test_publish_in_aftereffects.py           | 34 ++++++---
 ...test_publish_in_aftereffects_multiframe.py | 36 ++++++---
 tests/integration/hosts/maya/lib.py           | 19 ++++-
 .../hosts/maya/test_publish_in_maya.py        | 57 ++++++++------
 tests/integration/hosts/nuke/lib.py           | 31 ++++++--
 .../hosts/nuke/test_publish_in_nuke.py        | 25 +++++--
 tests/integration/hosts/photoshop/lib.py      | 11 ++-
 .../photoshop/test_publish_in_photoshop.py    |  6 +-
 tests/lib/db_handler.py                       | 23 +++---
 tests/lib/testing_classes.py                  | 70 ++++++++++++++----
 tests/resources/test_data.zip                 | Bin 7350 -> 5098 bytes
 tests/unit/igniter/test_bootstrap_repos.py    | 30 ++++----
 15 files changed, 275 insertions(+), 111 deletions(-)

diff --git a/tests/README.md b/tests/README.md
index 69828cdbc2..d36b6534f8 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -1,5 +1,15 @@
 Automatic tests for OpenPype
 ============================
+
+Requirements:
+============
+Tests recreate a fresh DB for each run, so the `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on PATH.
+
+You can find installers here: https://www.mongodb.com/docs/database-tools/installation/installation/
+
+You can test that `mongorestore` is available by running this in a console or cmd:
+```mongorestore --version```
+
 Structure:
 - integration - end to end tests, slow (see README.md in the integration folder for more info)
 - openpype/modules/MODULE_NAME - structure follow directory structure in code base
diff --git a/tests/conftest.py b/tests/conftest.py
index aa850be1a6..7b58b0314d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -43,3 +43,15 @@ def app_variant(request):
 @pytest.fixture(scope="module")
 def timeout(request):
     return request.config.getoption("--timeout")
+
+
+@pytest.hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_runtest_makereport(item, call):
+    # execute all other hooks to obtain the report object
+    outcome = yield
+    rep = outcome.get_result()
+
+    # set a report attribute for each phase of a call, which can
+    # be "setup", "call", "teardown"
+
+    setattr(item, "rep_" + rep.when, rep)
diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py
index 9fffc6073d..ffad33d13c 100644
--- a/tests/integration/hosts/aftereffects/lib.py
+++ b/tests/integration/hosts/aftereffects/lib.py
@@ -2,10 +2,13 @@ import os
 import pytest
 import shutil
 
-from tests.lib.testing_classes import HostFixtures
+from tests.lib.testing_classes import (
+    HostFixtures,
+    PublishTest,
+)
 
 
-class AfterEffectsTestClass(HostFixtures):
+class AEHostFixtures(HostFixtures):
     @pytest.fixture(scope="module")
     def last_workfile_path(self, download_test_data, output_folder_url):
         """Get last_workfile_path from source data. 
@@ -15,15 +18,15 @@ class AfterEffectsTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, + "test_project_test_asset_test_task_v001.aep") + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") shutil.copy(src_path, dest_path) yield dest_path @@ -32,3 +35,12 @@ class AfterEffectsTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs", "Auto-Save"] + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 4925cbd2d7..57d5a3e3f1 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Uses generic TestCase to prepare fixtures for test data, testing DBs, @@ -32,10 +32,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -49,27 +49,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + 
assert not any(failures) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index c882e0f9b2..2d95eada99 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -1,15 +1,15 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects - Should publish 5 frames + Should publish 10 frames """ PERSIST = True @@ -19,10 +19,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -36,27 +36,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "h264_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + assert not any(failures) diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c065..ab402f36e0 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -15,7 +18,7 @@ class MayaTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -23,7 +26,7 @@ class MayaTestClass(HostFixtures): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path @@ -39,3 +42,11 @@ class MayaTestClass(HostFixtures): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b0564428..b7ee228aae 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,8 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) @@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass): ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" @@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) - assert 11 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + 
"context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba81..baff675da7 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,17 +1,20 @@ import os import pytest -import shutil +import re -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -27,7 +30,16 @@ class NukeTestClass(HostFixtures): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? + replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path @@ -41,4 +53,11 @@ class NukeTestClass(HostFixtures): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b5..f84f13fa20 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,17 +1,25 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. 
Then checks content of DB (if subset, version, representations were @@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. @@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass): ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 50 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): @@ -52,7 +61,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -61,7 +70,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae6..9d51a11c06 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -32,3 +35,7 @@ class PhotoshopTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e..4aaf43234d 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass): ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "") ] - APP = "photoshop" + APP_GROUP = "photoshop" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b181055012..82e741cc3b 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ class DBHandler: "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): # noqa + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) @@ -133,7 +132,11 @@ class DBHandler: db_name=db_name, db_name_out=db_name_out, collection=collection) print("mongorestore query:: {}".format(query)) - subprocess.run(query) + try: + subprocess.run(query) + except FileNotFoundError: + raise RuntimeError("'mongorestore' utility must be on path." 
+ "Please install it.") def teardown(self, db_name): """Drops 'db_name' if exists.""" @@ -231,13 +234,15 @@ class DBHandler: # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f81095..5e3b11cfc9 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,9 +8,12 @@ import tempfile import shutil import glob import platform +import requests +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler +from openpype.modules import ModulesManager class BaseTest: @@ -36,9 +39,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] @@ -57,7 +60,7 @@ class ModuleUnitTest(BaseTest): m.undo() @pytest.fixture(scope="module") - def download_test_data(self, test_data_folder, persist=False): + def download_test_data(self, test_data_folder, persist, request): test_data_folder = test_data_folder or self.TEST_DATA_FOLDER if test_data_folder: print("Using existing folder {}".format(test_data_folder)) @@ -78,7 +81,8 @@ class ModuleUnitTest(BaseTest): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = (persist or self.PERSIST or + self.is_test_failed(request)) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -125,7 +129,8 @@ class ModuleUnitTest(BaseTest): monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data) @pytest.fixture(scope="module") - def db_setup(self, download_test_data, env_var, monkeypatch_session): + def db_setup(self, download_test_data, env_var, monkeypatch_session, + request): """Restore prepared MongoDB dumps into selected DB.""" backup_dir = os.path.join(download_test_data, "input", "dumps") @@ -135,13 +140,14 @@ class ModuleUnitTest(BaseTest): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.is_test_failed(request) + if not persist: 
db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -166,6 +172,13 @@ class ModuleUnitTest(BaseTest): mongo_client = OpenPypeMongoConnection.get_mongo_client() yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"] + def is_test_failed(self, request): + # if request.node doesn't have rep_call, something failed + try: + return request.node.rep_call.failed + except AttributeError: + return True + class PublishTest(ModuleUnitTest): """Test class for publishing in hosts. @@ -188,7 +201,7 @@ class PublishTest(ModuleUnitTest): TODO: implement test on file size, file content """ - APP = "" + APP_GROUP = "" TIMEOUT = 120 # publish timeout @@ -210,10 +223,10 @@ class PublishTest(ModuleUnitTest): if not app_variant: variant = ( application_manager.find_latest_available_variant_for_group( - self.APP)) + self.APP_GROUP)) app_variant = variant.name - yield "{}/{}".format(self.APP, app_variant) + yield "{}/{}".format(self.APP_GROUP, app_variant) @pytest.fixture(scope="module") def output_folder_url(self, download_test_data): @@ -310,7 +323,8 @@ class PublishTest(ModuleUnitTest): yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! @@ -328,12 +342,33 @@ class PublishTest(ModuleUnitTest): glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = self._filter_files(published, + skip_compare_folders) + + # filter out temp files also in expected + # could be polluted by accident by copying 'output' to zip file + filtered_expected = self._filter_files(expected, skip_compare_folders) + + not_mtched = filtered_expected.symmetric_difference(filtered_published) + if not_mtched: + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_mtched)))) + + def _filter_files(self, source_files, skip_compare_folders): + """Filter list of files according to regex pattern.""" + filtered = set() + for file_path in source_files: + if skip_compare_folders: + if not any([re.search(val, file_path) + for val in skip_compare_folders]): + filtered.add(file_path) + else: + filtered.add(file_path) + + return filtered -class HostFixtures(PublishTest): +class HostFixtures(): """Host specific fixtures. 
Should be implemented once per host.""" @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): @@ -344,3 +379,8 @@ class HostFixtures(PublishTest): def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" raise NotImplementedError + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + """Use list of regexs to filter out published folders from comparing""" + raise NotImplementedError diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index 0faab86b37d5c7d1224e8a92cca766ed80536718..e22b9acdbdbcd7312776e33918ac2a6a9211dab3 100644 GIT binary patch delta 3519 zcmaJ@2{=^iA0A|BtYb!D2xCNpu{L(enk)$;^`FuhnnK9lwM?=ljAbyiA|y-5(nu5& zl58c)E)0qxZOHPUp}N<-_qpGB&U3zVp7;Ho_xsNGp6{*J`XHNxt-uPkv+%I(fB;wl z06>j{BRUz6*l{r$hfYpY1~IRU$G@92Nq|E{4jX{=@bYx>bM^J}RkE?zB6Dq$ue@4( zY0C)!T!jDt>P#?U8<{iyXRI|*ryh6`hs_fO#nlge8!HvVqqXQ*32TX zI}#Eups@}}3MwtEeHc7gEyia1jDX4iP#*ZH!e&tTWKi9w$yXrUA|yn*!t(*@Fz?bxCih3_1kQRSZYdZ#IXIi{+P|_K*{XTLV`b8=^DbfJ1mcWt z%~{*PoF8lBRsQV>hgIcM2gjUs%e@~(xbxo&H1?2P=m1?Jq7f=mWX=W9!$+<;J8!E7 z?P)6@!ag1Z?MSgYPpbV-hN#0#w=g8ckeDc6wR=KU)lORUYoEQi3xR@SxYtm zZN>2#S43Valot{T76Cf8M?Y}vAS16Txb1Fd*7*>`Lc=KCDy5^jxMU5_kglJC&nG*D?72{$?u|2b#VG261bjOG$`QUx$4glo@FLh+7s{l%x2OXMD2V&wdJM?CLv` zojPuWcPh*dpC|k=+h05ej*|4_H~M%pV}?P)3={ z1b}Womty924Q4MgCOE9v7_)O&aHjbTfog1-B0<#pGEjW8Nwo-8^2%A{V!#pnAQoH7 z;gGK2y==z?uF`-e{ILX05YrCJs}7Dl5;qZhufMp(h?9`2)Q2h^OvP01s(+69OV%sIwAB_~voy`c6^98w6cRMUWe2%GAmu8y> zy{8;?5tWsD(1J(3etfji**Gf{eQzy8VoC5G>|lcxAt?r?(G;{=hEuQqUssskuIdY% z);zkA?dV>Q!;LgK@3?^sB<7BWE@z!oIWZ^a;U~Fh4CK+O|I#4vp_TWTM_vjIn-0gl z@*|J=_7#|X(CivE^{_FegkJk1IqRKGvpaS}cnRt`(lO%4HZSQb{yO8SjOB5$*QUA7 z^0JK_1ewpU+c)QKI$<0^PRP%taXxw?T|V%;KN@+5UR@JIzEoSi>luES>ylHRkRLl; z@Z+$m%E5YrJ6W-UjDq`7o*I z)@Y({0mied;||Y@V{fb);bxY{ZygBK4P3>ElI7W*PW;%YpqI_Ye1_Xt8>!xJP*^Ry z8@aD(dgc_3bF2>>s=M|mg}rv;1+g>{TE!^Of!m z1cBwUEMM@D`l@8r*|4KrfXB4~Z^;)A%(o5R@6mC&r2`D9h(5jl_Ea4ZVe;Zs6mHo% z%KhCJ3_A8mVdZjWrx+Kh+@-JkYM(#MIn3mdS2_8x`SzU~P-NAFSB@V};G6jPFwW@=kCWwlli?BIpot@>Tnz z`XA$}?p}5E7jKIXm(ZAAnw0J7{!S`D<)&uw?0ax8zrUNR{mQ~l+`J#@NkoGfq9`_RRCw?U^J2mL-@B7?^mDHggLb0Yku*QTchMMwD80Rb6F&!=)Ngu%GZICYpTjx82Rg#$`x zl!Qc%)mma6Tsl$a!=`cSX-0HooK&^?zSHWFwHD*ACMPyj+4VL5u#2o0dYr}0C#3{F zPfp6}&Q{f(sfg*WeS)-sa-GRWi3%T`=h2#nm{Ai_jY^Ac^sE*KFWlqswLJ{7x4>`1>D7p3&wfxta;FpRa=S<59i`Z~6FKxn#btfk&>*mgd~G`3 zz^R|p;P{TinVMhk*zjTqb;Fnu0mS$^jo7j>0nIfB6kzI+=|ztUP$Zvqt%E%_4uAeD{9}7x^uKDZHA8VKv#!`PuHk zE!5Tv=9nNTDFQ0IwTf;-MOp3nNVQPue|&x6ZPjD=NbAtO{{U^5P{&8o7ZQiCb1 z8Ao(24(GQxu%{!~)D>*@3xMhx0Qeig%HGk<^_OVuYti^OM4P$UIlBB048`AK92~*+ zj$V%7Kf%ZV~5Z-Q;;l_Z{v#(ni-;f;D+uA>_3_4;u= zI>y9!s)`ru+DgUns)GsnWtapLep49acxd0{2Wk0TWj4*{imJI2Uk69VWvnWFZM?j| ze%qMx8u;{#E>lYWvOov!cI4*YZd;S;@PlY*>`z9zUMyVrD>_u zk;FT^8hqleVs%_zQL(~>Hsp7;rFNAStI;$Q-g4g}F+&Z$Wt|dOA(V#f@d()?^JueI z2U)cFn;&KHmxB*NZdS^iDfzlzBFgn{(~=Rb+ClY_syq@ zw+DCa2k9DL|6G@sbzJI8MOF;0-xKJ?`7i6(4(w`tvBcS|p#=?&>BoG?0hIm$UQ{tb zO$2FgwnIdX(AP~lAbGT67z!sIx^qt}npX#!QxNn-tt?~<2x6Od?}-azkmC>@k(f4A zwcy+(%^;wIk0qB&dBOuG65f_auz@AZ`Y}+I@lXpWVT1wF4do6C+Jqf~BzK)5wNP1B zPwySh7<&=C2p(?rR%|^hkkGAEQqx5JfUgm!$MJ@rogpqsstbr6`<5_qzpKBzqwWlO z6Y2Mn3wYKS?n6iH0A0l_|2%RRu&b+;{eM?+Mx3%jrx;L%8K)~S4NK1(*@JDCX{|kU zzNO?jbBg;{dRI+EXPhq|I;#p+E8K?ToZ#i{Qgb$iMNTqN1L3L>cMP)}>?vltdgr?O zmAW&HF#%i=FJ+B2Nd$sMVzE!qlSES6Dgpj1rF7QGj>&NmaAVOJt>h+pP733Wt+X)@ zk-JY#+)*|CTXwTGHu+J!QQteImc()L@Y_7F6O8yFh@RlYyLSjwXp4;uI(z1>gV8{#P(K>s0e!7z(GEu z7O8qsi2wP_XUyMI=k**{#TS~gH0X8b|47~Mg)<-yEgamCg+?X4mq3NQV?_I=Ku-9t z(~yR;Gd4N@39nxds4$ 
z>*7KFx5K$qHj`g;K*MlVt$nVv_X$)3cDVrn_J73w!QtraU=3BgKjdKhfxa#2eY_UI zMjkuV_tg*uMbsVDhzxIG?#-hzzCKl);>U0*CfM}iyrUz zfzt#u3p-niNlru03{b6<2 ze|47I|I9RG%Z5L+ai3iuct==wY(TroCgcHI{)4@l!$y z?7%j};EwBS$(9Xc3 zugDm^S_!1PFaDxQBC~61!1HXOu(;I9S)P$mOkJIaSv&vb1E%CaEvg~u>TU$PJ2WbJ z;bg%L?s6;%sxqDQ^N$N_CBQ=y$x5i`qf17efjb+MLH62VC2w(G1nJ4g8OP_T-)FCvpK0!;0$R3atfiSKInhJW>?c1L4GigStKEX#py?w z3EQ8S%J?KQ54C9-QZ}9+ru+d`bU9^XsnK#n_f_L8msnT8C)-A=$*KX`RY~RHFzJj} z)vY0|(uh0UvT)_90Lg`9kGL=$g#}wLS8B9no91|Dm9W}4z|bIVOe!$*A}FuZ+bl3iO(Z4 zH+j0GKb>D}!Aid;>`aEhck0z8K-76bK&8WPj!!qY=4JBTVd zxu8?YW@7igM%t@QPHCUU)|Zg5W(oa?;leTPd>eyY@i&fkp+bm1q+4P&R@&<{D3QD$ zp2LjyyNvaGzMCY?yh#YdcW@nChc~P$mkzc_)r(R=?N4rA zit{0De1m-w!e&{~Nm*)X4jAB&yRVN3cJ7S$-dzG*?IZ*-=PHJYPQM$#hpV$*jOE)d zg99xSvCL1=$SEzQE<{Ba<{GG14P$;n|Mmvk$12EyPk#pG#WOI$Tp$;V2DZ_1E+UJ+ zggL3EmM$xTe<{9Xw<^7sgM@Wu!FrdQ`-~zw13h+Rw@SpkvS+!10@j&&&)cSv;Wmge zGHI}P8lF+|H`CctcE?(Bxdyj31l+k)i;uHonPe1eIvOk0=H4B3TB9|znjax$vX!xSMi@b_C_J~>WmEWUV6y%-V`pG{#)xe=lmcPcUy~>u5LKm z`0GU!bvy|jNHMYJO?K>?uAxulI5AzzPhzk5O==WuwYRp76(YPv~iCu zjVLUI>7d=%^ql7^ZDTaTK-*wo@#d}_2}D(TA*w2pnX`h$_OwiUJZF@GU z%*u1sPNHdxS9k?&_RxXK-!O=DmffOOF|?6yP2ba_OMlE)OhSUuHa8yykxPK&LfklT z@g+&R=_hV07~{EzahwIeSK^x7zN6?ur1!olgz6*&kX|y#h{{- zL3klV!X+3NyDFYZJQgnBmQ+-L5&gCFatLUtYxcuv+5O{$Pd&h9UQj)H&Adqr3{Iu- z#s?rt+cpZjs%WHiTYEwVUm2%iNxJ2f27r|B$Hy_pvu#RHq=6EYef9px)-zrx$^$x#(auVSGjhrUzcAC4bWD9}#R zP524BC4d5j_T$=t;;mP8iA8Gq8e`6O4ZaXLP?cnFMQXAHTn6sEY$V~%TdfH7DpeEm<2N@|@f(b$&Cp%iLu$%A}KGw9GD~eJ)_-Zse z!qyc@IRFl|sKg~o|HAmGlckX&G3V<@8&}NKcOi4HWc-~ z>E2nB#knaP-=d~0;wxBt=t2>gPqq3WM;eQT-Po%7OVM(j9Q*G(%+z9@yi~93`bwKc z6CeZoaNn671Bt zUO0D=u1}u_OUc(Nwu~Bj{CMIy$G>JCpCPV=Tg>J!Wt5*2m6VBB`g)0(6IV~dv$)Ha zCgwAbTI(>X7O3Yj)(E)BvZ^NaSu4{b)XfEXRfxl`@RLd-D8l<^lkj>)eNj9N+ex7K zzIflmVHJK{Z%{1x9hppW+S?sz{KRcIv(qdw{GB4!+i0d_fh=gGIxAeU%U$|mECr~t z$vz)@QOxS=^v#007JIn#S(Eyf1&dH=HNe{1rxv7IEgAP(TB-*Rl0KoiE57R zyutJbBhN~jTv_@QYr_Twu9*lmG#iT0gjED=_ z=g=eL{pN_b^qekuROR(uC5yBn^d4R!8=3?qa8A)UZl2bZd9v51CzUMJ7J;27m=EFo zySLpQi3UcHg+{>r@Bqc3mx~8-4#MO?%Yd@(+1rODaBAHsnF2*b zAoUfP(>!B!*@BStadXG7YZP%pLFo*_Y}h^Lf$;J5eefkCinlkUN}p@>k8*nysjH8z zPgn3Sb(Wj*FNt0qcP`%XjWOTP`9t6Bv!K`A|L-0D_Y+T)IA%K~F2e4?Vmv}Mgtj?| zgx!dEq@7+|mxGlMV_|2c&h^xsj=jT6(qWo&mdk*UOGTuV+m3*OGh5u)b9fclmmkGMsN!zmT!^fS*ro`a?QBb>S*Lx&RB>`r508eC3%T1E@oY)t%+l!XH7yC1C&n+W!UnFa8gUpm=3j z4FF5~191O!U=Luz(=b5PBY>6}A#VdXF(2!xvHyUUtiZdQLzW^y)I_tROP^GdG=nr) z|I;^8KmE!zI22QD?8WuX4GLmdHG5klqd>Op#So63E|f+r81e50Wp8gIhk{^u#(;bc z0-+7Ttde|PQy+&0aafWJV^i;gbYhD-`!&%3f1d!n8%A>BZTxxGgL-_|Oh&>RATF-P z&aQ5bzo00up>E2whl8_?xs~m&1|v@-mHLcuU2G0C%l%M$;&;PTO!a+H)5y9}-;i(2QZ6$( zyjpBe)!FHi4r3}lF{=b%j2}WqthNI0*0pYfRS_mb==kASfeM7p&jgXyZZ`(p2KVLT z3sR<8Xx0}WXT4%y#*FA1UD-VL;cwZgP3t0K(c0XY*#Jw3lpFMiG~0^^mAel`f-{$= zek4{N!n8@3-fMq1gDJr*6cH>l+?P(j)jN#?M^f$$zc4Eq|3G%vi%>X_3IwzFq$5l_ z?16Z#_`I@JM#Q<6Jz_zqmI}F+ z=D@|lub&GXyKS9JXrG&V4aQ`kk;Z5#9+C?A&iZi@_@>tv2->Ld-abltl>hNdJJQoY zSM#+6*$O)O;;_U!?YBm=&-J}*)V{QOG3k)Z4-q};HdqS}a8Pqe+EX{oOh7Ud*S$^O zT*&I)>0R{9(zIJ}3DQgBYsVA2hpXRk$ABMoR7=7pH+eqS_{WGLLMgbv&Z&!$KrQnd z97quc78mgMZaK7wpzXrmzG%OC=U2F!-1E!$3*7JC^Dkcd6~Rqz`W1oBZx8Jx@f5-aUc7L7zy5V?15BP(=0sbH9f3qL2Ghg@PE_g3HeuVJv zng6Eqf7Oc%kL;W@z+c4lDib%!=>^SYXLtV>&9CBmHU7U&)P=5Hwu_$C z v2 - v3 = OpenPypeVersion(1, 2, 3, staging=True) - assert str(v3) == "1.2.3+staging" + v3 = OpenPypeVersion(1, 2, 3) + assert str(v3) == "1.2.3" - v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1") - assert str(v4) == "1.2.3-rc.1+staging" + v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1") + assert str(v4) == "1.2.3-rc.1" assert v3 > v4 assert v1 > v4 assert v4 < 
OpenPypeVersion(1, 2, 3, prerelease="rc.1") @@ -73,7 +73,7 @@ def test_openpype_version(printer): OpenPypeVersion(4, 8, 10), OpenPypeVersion(4, 8, 20), OpenPypeVersion(4, 8, 9), - OpenPypeVersion(1, 2, 3, staging=True), + OpenPypeVersion(1, 2, 3), OpenPypeVersion(1, 2, 3, build="foo") ] res = sorted(sort_versions) @@ -104,27 +104,26 @@ def test_openpype_version(printer): with pytest.raises(ValueError): _ = OpenPypeVersion(version="booobaa") - v11 = OpenPypeVersion(version="4.6.7-foo+staging") + v11 = OpenPypeVersion(version="4.6.7-foo") assert v11.major == 4 assert v11.minor == 6 assert v11.patch == 7 - assert v11.staging is True assert v11.prerelease == "foo" def test_get_main_version(): - ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo") + ver = OpenPypeVersion(1, 2, 3, prerelease="foo") assert ver.get_main_version() == "1.2.3" def test_get_version_path_from_list(): versions = [ OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')), - OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")), + OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")), OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo")) ] path = BootstrapRepos.get_version_path_from_list( - "3.4.5+staging", versions) + "3.4.5", versions) assert path == Path("/bar/baz") @@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): result = fix_bootstrap.find_openpype(include_zips=True) # we should have results as file were created assert result is not None, "no OpenPype version found" - # latest item in `result` should be latest version found. + # latest item in `result` should be the latest version found. + # this will be `7.2.10-foo+staging` even with *staging* in since we've + # dropped the logic to handle staging separately and in alphabetical + # sorting it is after `strange`. 
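
# A minimal sketch of the ordering these assertions rely on now that the
# separate `staging` flag is gone: a prerelease sorts below the plain release
# of the same version and the main version string drops the prerelease tag.
# The import path is an assumption for illustration, not part of the change.
from igniter.bootstrap_repos import OpenPypeVersion

release = OpenPypeVersion(1, 2, 3)
candidate = OpenPypeVersion(1, 2, 3, prerelease="rc.1")

assert str(candidate) == "1.2.3-rc.1"
assert release > candidate                      # release wins over prerelease
assert candidate.get_main_version() == "1.2.3"  # tag stripped from main version
assert sorted([candidate, release])[-1] is release
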
expected_path = Path( d_path / "{}{}{}".format( - test_versions_2[3].prefix, - test_versions_2[3].version, - test_versions_2[3].suffix + test_versions_2[4].prefix, + test_versions_2[4].version, + test_versions_2[4].suffix ) ) assert result, "nothing found" From bf58eb8322e3c7092c7dc5b49f636311493dfb63 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:04:10 +0100 Subject: [PATCH 355/409] Hound --- tests/integration/hosts/nuke/lib.py | 1 + tests/lib/testing_classes.py | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index baff675da7..70860b92b3 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -59,5 +59,6 @@ class NukeHostFixtures(HostFixtures): def skip_compare_folders(self): yield [] + class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 5e3b11cfc9..82cc321ae8 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,12 +8,10 @@ import tempfile import shutil import glob import platform -import requests import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler -from openpype.modules import ModulesManager class BaseTest: From 52073873526505251d0087286bdb8775f3c050d9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:07:04 +0100 Subject: [PATCH 356/409] Added AE test with old stored instances Release 3.15 will move to New Publisher --- .../test_publish_in_aftereffects_legacy.py | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py new file mode 100644 index 0000000000..8c7a74c60e --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -0,0 +1,93 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestPublishInAfterEffects(AELocalPublishTestClass): + """Basic test case for publishing in AfterEffects + + Uses old Pyblish schema of created instances. + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens AfterEffects, run publish on prepared workile. + + Test zip file sets 3 required env vars: + - HEADLESS_PUBLISH - this triggers publish immediately app is open + - IS_TEST - this differentiate between regular webpublish + - PYBLISH_TARGETS + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. 
+ + """ + PERSIST = False + + TEST_FILES = [ + ("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-", + "test_aftereffects_publish_legacy.zip", + "") + ] + + APP_GROUP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTest_taskMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInAfterEffects() From e68ad503e74907bc19a1e7ea71a6a07f675a7e4d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:07:47 +0100 Subject: [PATCH 357/409] Remove temporarily AE tests configured for Tray Publisher --- .../test_publish_in_aftereffects.py | 91 ------------------- ...test_publish_in_aftereffects_multiframe.py | 78 ---------------- 2 files changed, 169 deletions(-) delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py deleted file mode 100644 index 57d5a3e3f1..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ /dev/null @@ -1,91 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AELocalPublishTestClass): - """Basic test case for publishing in AfterEffects - - Uses generic TestCase to prepare fixtures for test data, testing DBs, - env vars. - - Opens AfterEffects, run publish on prepared workile. - - Test zip file sets 3 required env vars: - - HEADLESS_PUBLISH - this triggers publish immediately app is open - - IS_TEST - this differentiate between regular webpublish - - PYBLISH_TARGETS - - Then checks content of DB (if subset, version, representations were - created. - Checks tmp folder if all expected files were published. 
- - """ - PERSIST = False - - TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", - "test_aftereffects_publish.zip", - "") - ] - - APP_GROUP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTest_taskMain")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "aep"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "thumbnail"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "png_png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py deleted file mode 100644 index 2d95eada99..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AELocalPublishTestClass): - """Basic test case for publishing in AfterEffects - - Should publish 10 frames - """ - PERSIST = True - - TEST_FILES = [ - ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", - "test_aftereffects_publish_multiframe.zip", - "") - ] - - APP_GROUP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTest_taskMain")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "aep"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", 
- "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "thumbnail"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "h264_png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() From 6bba712b98d3a966c89d2589a8dfe6884a4d7391 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 16:10:02 +0100 Subject: [PATCH 358/409] nuke: viewer with Rec.709 is correctly returning full name --- openpype/hosts/nuke/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 2691b7447a..bde06e4fd7 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2961,7 +2961,7 @@ def get_viewer_config_from_string(input_string): viewer = split[1] display = split[0] elif "(" in viewer: - pattern = r"([\w\d\s]+).*[(](.*)[)]" + pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" result = re.findall(pattern, viewer) try: result = result.pop() From 9e40ef96a2b320e413a39281beb12f7b2d6d219a Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 15:29:45 +0000 Subject: [PATCH 359/409] Ensure Mongo database directory exists. --- tools/run_mongo.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index c64ff75969..85b94b0971 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -112,4 +112,6 @@ $mongoPath = Find-Mongo $preferred_version Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan +New-Item -ItemType Directory -Force -Path $($dbpath) + Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null From 1fcc15583c1721636244682b3e06c21b4fda729a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:31:56 +0100 Subject: [PATCH 360/409] Fix AE legacy test --- .../hosts/aftereffects/test_publish_in_aftereffects_legacy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py index 8c7a74c60e..5d0c15d63a 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -62,7 +62,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTest_taskMain", + additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, @@ -71,7 +71,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) 
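
# Quick check of the widened pattern from the Rec.709 viewer fix above; the
# sample string is only an illustrative guess at the "<name> (<config>)" form
# that get_viewer_config_from_string() parses.
import re

old_pattern = r"([\w\d\s]+).*[(](.*)[)]"
new_pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
viewer = "Rec.709 (default)"

print(re.findall(old_pattern, viewer))  # [('Rec', 'default')] - name cut at the dot
print(re.findall(new_pattern, viewer))  # [('Rec.709 ', 'default')] - full name kept
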
additional_args = {"context.subset": "renderTest_taskMain", From e2ec1457c9014c164ed6ec807416c66ae19a5950 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:35:04 +0100 Subject: [PATCH 361/409] Fix Nuke legacy test Legacy Nuke tests are not cleaning up `renders` folders. Branch with DL version disables Cleanup for now. --- tests/integration/hosts/nuke/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index 70860b92b3..96daec7427 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -57,7 +57,7 @@ class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def skip_compare_folders(self): - yield [] + yield ["renders"] class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): From 8b71066d9c33d782ca2520bce251fe733e4d8ad5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Dec 2022 16:53:09 +0100 Subject: [PATCH 362/409] :art: add menu and basic publishing support --- openpype/hosts/max/api/__init__.py | 2 - openpype/hosts/max/api/lib.py | 66 ++++++++++- openpype/hosts/max/api/menu.py | 64 +++++++++-- openpype/hosts/max/api/pipeline.py | 63 +++++----- openpype/hosts/max/api/plugin.py | 108 ++++++++++++++++++ .../max/plugins/create/create_pointcache.py | 21 ++++ openpype/hosts/max/startup/startup.ms | 3 +- 7 files changed, 284 insertions(+), 43 deletions(-) create mode 100644 openpype/hosts/max/api/plugin.py create mode 100644 openpype/hosts/max/plugins/create/create_pointcache.py diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index b6998df862..503afade73 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -4,10 +4,8 @@ from .pipeline import ( MaxHost ) -from .menu import OpenPypeMenu __all__ = [ "MaxHost", - "OpenPypeMenu" ] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py index e50de85f68..8a57bb1bf6 100644 --- a/openpype/hosts/max/api/lib.py +++ b/openpype/hosts/max/api/lib.py @@ -1,2 +1,64 @@ -def imprint(attr, data): - ... +# -*- coding: utf-8 -*- +"""Library of functions useful for 3dsmax pipeline.""" +from pymxs import runtime as rt +from typing import Union + + +def imprint(node_name: str, data: dict) -> bool: + node = rt.getNodeByName(node_name) + if not node: + return False + + for k, v in data.items(): + rt.setUserProp(node, k, v) + + return True + + +def lsattr( + attr: str, + value: Union[str, None] = None, + root: Union[str, None] = None) -> list: + """List nodes having attribute with specified value. + + Args: + attr (str): Attribute name to match. + value (str, Optional): Value to match, of omitted, all nodes + with specified attribute are returned no matter of value. + root (str, Optional): Root node name. If omitted, scene root is used. + + Returns: + list of nodes. 
+ """ + root = rt.rootnode if root is None else rt.getNodeByName(root) + + def output_node(node, nodes): + nodes.append(node) + for child in node.Children: + output_node(child, nodes) + + nodes = [] + output_node(root, nodes) + if not value: + return [n for n in nodes if rt.getUserProp(n, attr)] + + return [n for n in nodes if rt.getUserProp(n, attr) == value] + + +def read(container) -> dict: + data = {} + props = rt.getUserPropBuffer(container) + # this shouldn't happen but let's guard against it anyway + if not props: + return data + + for line in props.split("\r\n"): + key, value = line.split("=") + # if the line cannot be split we can't really parse it + if not key: + continue + data[key.strip()] = value.strip() + + data["instance_node"] = container + + return data diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py index 13ca503b4d..d1913c51e0 100644 --- a/openpype/hosts/max/api/menu.py +++ b/openpype/hosts/max/api/menu.py @@ -1,29 +1,70 @@ # -*- coding: utf-8 -*- """3dsmax menu definition of OpenPype.""" -from abc import ABCMeta, abstractmethod -import six from Qt import QtWidgets, QtCore from pymxs import runtime as rt from openpype.tools.utils import host_tools -@six.add_metaclass(ABCMeta) class OpenPypeMenu(object): + """Object representing OpenPype menu. + + This is using "hack" to inject itself before "Help" menu of 3dsmax. + For some reason `postLoadingMenus` event doesn't fire, and main menu + if probably re-initialized by menu templates, se we wait for at least + 1 event Qt event loop before trying to insert. + + """ def __init__(self): + super().__init__() self.main_widget = self.get_main_widget() + self.menu = None + + timer = QtCore.QTimer() + # set number of event loops to wait. + timer.setInterval(1) + timer.timeout.connect(self._on_timer) + timer.start() + + self._timer = timer + self._counter = 0 + + def _on_timer(self): + if self._counter < 1: + self._counter += 1 + return + + self._counter = 0 + self._timer.stop() + self.build_openpype_menu() @staticmethod def get_main_widget(): """Get 3dsmax main window.""" return QtWidgets.QWidget.find(rt.windows.getMAXHWND()) - def get_main_menubar(self): + def get_main_menubar(self) -> QtWidgets.QMenuBar: """Get main Menubar by 3dsmax main window.""" return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] - def get_or_create_openpype_menu(self, name="&OpenPype", before="&Help"): + def get_or_create_openpype_menu( + self, name: str = "&OpenPype", + before: str = "&Help") -> QtWidgets.QAction: + """Create OpenPype menu. + + Args: + name (str, Optional): OpenPypep menu name. + before (str, Optional): Name of the 3dsmax main menu item to + add OpenPype menu before. + + Returns: + QtWidgets.QAction: OpenPype menu action. 
+ + """ + if self.menu is not None: + return self.menu + menu_bar = self.get_main_menubar() menu_items = menu_bar.findChildren( QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) @@ -37,10 +78,13 @@ class OpenPypeMenu(object): help_action = item.menuAction() op_menu = QtWidgets.QMenu("&OpenPype") - menu_bar.insertMenu(before, op_menu) + menu_bar.insertMenu(help_action, op_menu) + + self.menu = op_menu return op_menu - def build_openpype_menu(self): + def build_openpype_menu(self) -> QtWidgets.QAction: + """Build items in OpenPype menu.""" openpype_menu = self.get_or_create_openpype_menu() load_action = QtWidgets.QAction("Load...", openpype_menu) load_action.triggered.connect(self.load_callback) @@ -63,18 +107,24 @@ class OpenPypeMenu(object): workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) workfiles_action.triggered.connect(self.workfiles_callback) openpype_menu.addAction(workfiles_action) + return openpype_menu def load_callback(self): + """Callback to show Loader tool.""" host_tools.show_loader(parent=self.main_widget) def publish_callback(self): + """Callback to show Publisher tool.""" host_tools.show_publisher(parent=self.main_widget) def manage_callback(self): + """Callback to show Scene Manager/Inventory tool.""" host_tools.show_subset_manager(parent=self.main_widget) def library_callback(self): + """Callback to show Library Loader tool.""" host_tools.show_library_loader(parent=self.main_widget) def workfiles_callback(self): + """Callback to show Workfiles tool.""" host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index 2ee5989871..cef45193c4 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -5,6 +5,8 @@ import sys import logging import contextlib +import json + from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api from openpype.pipeline import ( @@ -12,7 +14,7 @@ from openpype.pipeline import ( register_loader_plugin_path, AVALON_CONTAINER_ID, ) -from openpype.hosts.max.api import OpenPypeMenu +from openpype.hosts.max.api.menu import OpenPypeMenu from openpype.hosts.max.api import lib from openpype.hosts.max import MAX_HOST_DIR from openpype.pipeline.load import any_outdated_containers @@ -32,6 +34,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + name = "max" menu = None @@ -46,23 +49,10 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): pyblish.api.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - log.info("Building menu ...") + # self._register_callbacks() self.menu = OpenPypeMenu() - log.info("Installing callbacks ... 
") - # register_event_callback("init", on_init) - self._register_callbacks() - - # register_event_callback("before.save", before_save) - # register_event_callback("save", on_save) - # register_event_callback("open", on_open) - # register_event_callback("new", on_new) - - # pyblish.api.register_callback( - # "instanceToggled", on_pyblish_instance_toggled - # ) - self._has_been_setup = True def has_unsaved_changes(self): @@ -70,7 +60,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): return True def get_workfile_extensions(self): - return [".hip", ".hiplc", ".hipnc"] + return [".max"] def save_workfile(self, dst_path=None): rt.saveMaxFile(dst_path) @@ -88,17 +78,15 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): return ls() def _register_callbacks(self): - for event in self._op_events.copy().values(): - if event is None: - continue + rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks")) - try: - rt.callbacks.removeScript(id=rt.name(event.name)) - except RuntimeError as e: - log.info(e) + rt.callbacks.addScript( + rt.Name("postLoadingMenus"), + self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks')) - rt.callbacks.addScript( - event.name, event.callback, id=rt.Name('OpenPype')) + def _deferred_menu_creation(self): + self.log.info("Building menu ...") + self.menu = OpenPypeMenu() @staticmethod def create_context_node(): @@ -128,12 +116,12 @@ attributes "OpenPypeContext" def update_context_data(self, data, changes): try: - context = rt.rootScene.OpenPypeContext.context + _ = rt.rootScene.OpenPypeContext.context except AttributeError: # context node doesn't exists - context = self.create_context_node() + self.create_context_node() - lib.imprint(context, data) + rt.rootScene.OpenPypeContext.context = json.dumps(data) def get_context_data(self): try: @@ -141,7 +129,9 @@ attributes "OpenPypeContext" except AttributeError: # context node doesn't exists context = self.create_context_node() - return lib.read(context) + if not context: + context = "{}" + return json.loads(context) def save_file(self, dst_path=None): # Force forwards slashes to avoid segfault @@ -149,5 +139,16 @@ attributes "OpenPypeContext" rt.saveMaxFile(dst_path) -def ls(): - ... 
\ No newline at end of file +def ls() -> list: + """Get all OpenPype instances.""" + objs = rt.objects + containers = [ + obj for obj in objs + if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID + ] + + for container in sorted(containers, key=lambda name: container.name): + yield lib.read(container) + + + diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py new file mode 100644 index 0000000000..0f01c94ce1 --- /dev/null +++ b/openpype/hosts/max/api/plugin.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +"""3dsmax specific Avalon/Pyblish plugin definitions.""" +import sys +from pymxs import runtime as rt +import six +from abc import ABCMeta +from openpype.pipeline import ( + CreatorError, + Creator, + CreatedInstance +) +from openpype.lib import BoolDef +from .lib import imprint, read, lsattr + + +class OpenPypeCreatorError(CreatorError): + pass + + +class MaxCreatorBase(object): + + @staticmethod + def cache_subsets(shared_data): + if shared_data.get("max_cached_subsets") is None: + shared_data["max_cached_subsets"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.get("creator_identifier") + if creator_id not in shared_data["max_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] + else: + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa + return shared_data + + @staticmethod + def create_instance_node(node_name: str, parent: str = ""): + parent_node = rt.getNodeByName(parent) if parent else rt.rootScene + if not parent_node: + raise OpenPypeCreatorError(f"Specified parent {parent} not found") + + container = rt.container(name=node_name) + container.Parent = parent_node + + return container + + +@six.add_metaclass(ABCMeta) +class MaxCreator(Creator, MaxCreatorBase): + selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = rt.getCurrentSelection() + + instance_node = self.create_instance_node(subset_name) + instance_data["instance_node"] = instance_node.name + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + self._add_instance_to_context(instance) + imprint(instance_node.name, instance.data_to_store()) + return instance + + def collect_instances(self): + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "max_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.get("instance_node") + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values, + ) + + def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. 
+ + """ + for instance in instances: + instance_node = rt.getNodeByName( + instance.data.get("instance_node")) + if instance_node: + rt.delete(instance_node) + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py new file mode 100644 index 0000000000..4c9ec7fb97 --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreatePointCache(plugin.MaxCreator): + identifier = "io.openpype.creators.max.pointcache" + label = "Point Cache" + family = "pointcache" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + from pymxs import runtime as rt + + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/startup/startup.ms b/openpype/hosts/max/startup/startup.ms index 94318afb01..aee40eb6bc 100644 --- a/openpype/hosts/max/startup/startup.ms +++ b/openpype/hosts/max/startup/startup.ms @@ -2,7 +2,8 @@ ( local sysPath = dotNetClass "System.IO.Path" local sysDir = dotNetClass "System.IO.Directory" - local startup = sysPath.Combine (sysPath.GetDirectoryName getSourceFile) "startup.py" + local localScript = getThisScriptFilename() + local startup = sysPath.Combine (sysPath.GetDirectoryName localScript) "startup.py" python.ExecuteFile startup ) \ No newline at end of file From 1719e33b00807c336fdf6367460b9fb386a91930 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 17:20:32 +0100 Subject: [PATCH 363/409] flame: create vertically aligned subsets fix --- openpype/hosts/flame/api/plugin.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 26129ebaa6..7e012330cf 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -596,18 +596,19 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): - hero_data.update({"heroTrack": False}) - if _in == self.clip_in and _out == self.clip_out: + _hero_data = deepcopy(hero_data) + _hero_data.update({"heroTrack": False}) + if _in <= self.clip_in and _out >= self.clip_out: data_subset = hero_data["subset"] # add track index in case duplicity of names in hero data if self.subset in data_subset: - hero_data["subset"] = self.subset + str( + _hero_data["subset"] = self.subset + str( self.track_index) # in case track name and subset name is the same then add if self.subset_name == self.track_name: - hero_data["subset"] = self.subset + _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag - tag_hierarchy_data = hero_data + tag_hierarchy_data = _hero_data # add data to return data dict self.marker_data.update(tag_hierarchy_data) From b15b5832241fda937dfda287220a70e66f8bcb7e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 3 Dec 2022 03:28:38 +0000 Subject: [PATCH 364/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index fc687a1263..5e61ee3a6b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8" +__version__ = "3.14.9-nightly.1" From ee921e0bd4f384a3a94707d706f251e4aa997927 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Sat, 3 Dec 2022 17:04:02 +0700 Subject: [PATCH 365/409] Removed class variable and TODOs --- .../plugins/publish/validate_texture_workfiles.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index a25b80438d..a7ae02a2eb 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -20,10 +20,6 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True - #TODO(2-rec): remove/change comment - # from presets - main_workfile_extensions = ['mra'] - def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] @@ -43,17 +39,19 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): formatting_data=formatting_data ) - @classmethod - def get_main_workfile_extensions(cls): + @staticmethod + def get_main_workfile_extensions(): project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) - #TODO: find better way? (depends on other plugin) try: extensions = (project_settings["standalonepublisher"] ["publish"] ["CollectTextures"] ["main_workfile_extensions"]) except KeyError: - extensions = cls.main_workfile_extensions + raise Exception("Setting 'Main workfile extensions' not found." 
+ " The setting must be set for the" + " 'Collect Texture' publish plugin of the" + " 'Standalone Publish' tool.") return extensions From c071724bcf16e25e2139bad404227de3ef2c83f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 10:19:28 +0100 Subject: [PATCH 366/409] remove tk_library and tcl_librabry envs for nuke processes --- openpype/hosts/hiero/addon.py | 5 +++++ openpype/hosts/nuke/addon.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py index f5bb94dbaa..1cc7a8637e 100644 --- a/openpype/hosts/hiero/addon.py +++ b/openpype/hosts/hiero/addon.py @@ -27,7 +27,12 @@ class HieroAddon(OpenPypeModule, IHostAddon): new_hiero_paths.append(norm_path) env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + # Remove auto screen scale factor for Qt + # - let Hiero decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py index 1c5d5c4005..9d25afe2b6 100644 --- a/openpype/hosts/nuke/addon.py +++ b/openpype/hosts/nuke/addon.py @@ -27,7 +27,12 @@ class NukeAddon(OpenPypeModule, IHostAddon): new_nuke_paths.append(norm_path) env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + # Remove auto screen scale factor for Qt + # - let Nuke decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] From c0b05e5846eecf7788d7ec3866023c83e4dded70 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 5 Dec 2022 10:50:58 +0100 Subject: [PATCH 367/409] add break and better explanation of procedure --- openpype/hosts/flame/api/plugin.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 7e012330cf..0d45792a38 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -596,6 +596,14 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): + """ + Since only one instance of hero clip is expected in + `self.vertical_clip_match`, this will loop only once + until none hero clip will be matched with hero clip. + + `tag_hierarchy_data` will be used only once for every + clip which is not hero clip. 
+ """ _hero_data = deepcopy(hero_data) _hero_data.update({"heroTrack": False}) if _in <= self.clip_in and _out >= self.clip_out: @@ -609,6 +617,7 @@ class PublishableClip: _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag tag_hierarchy_data = _hero_data + break # add data to return data dict self.marker_data.update(tag_hierarchy_data) From ca1d518dd0dd026124b8879fdb446b34a170cc05 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 5 Dec 2022 10:53:03 +0100 Subject: [PATCH 368/409] comment improvement --- openpype/hosts/flame/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 0d45792a38..ca113fd98a 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -601,7 +601,7 @@ class PublishableClip: `self.vertical_clip_match`, this will loop only once until none hero clip will be matched with hero clip. - `tag_hierarchy_data` will be used only once for every + `tag_hierarchy_data` will be set only once for every clip which is not hero clip. """ _hero_data = deepcopy(hero_data) From 22e664c96e20bbf428b90b46d88383ac84e5d7e0 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 5 Dec 2022 14:45:18 +0000 Subject: [PATCH 369/409] Indicate sequence or single frame. --- openpype/hosts/maya/plugins/publish/collect_ass.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 7c9a1b76fb..3ce1f2ccf1 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,3 +1,5 @@ +import re + from maya import cmds import pyblish.api @@ -27,4 +29,10 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) + # Indicate to user that it'll be a single frame. 
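
# The substitution added just below drops the frame-range suffix from the
# instance label when only a single frame will be exported; the sample label
# format is illustrative.
import re

label = "assMain [1001-1100]"
print(re.compile(r" \[.*\]").sub("", label))  # -> "assMain"
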
+ sequence = instance.data.get("exportSequence", False) + if not sequence: + group = re.compile(r" \[.*\]") + instance.data["label"] = group.sub("", instance.data["label"]) + self.log.debug("data: {}".format(instance.data)) From 0151540fdd91f92c51b78a21a529c2ae913b69a9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:24:56 +0100 Subject: [PATCH 370/409] added collector which can add comment per instance --- openpype/plugins/publish/collect_comment.py | 22 +++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 062142ace9..a2aef7fc1c 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -6,6 +6,28 @@ Provides: """ import pyblish.api +from openpype.lib.attribute_definitions import TextDef +from openpype.pipeline.publish import OpenPypePyblishPluginMixin + + +class CollectInstanceCommentDef( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): + label = "Comment per instance" + targets = ["local"] + # Disable plugin by default + families = ["*"] + enabled = True + + def process(self, instance): + pass + + @classmethod + def get_attribute_defs(cls): + return [ + TextDef("comment", label="Comment") + ] class CollectComment(pyblish.api.ContextPlugin): From f7f5019401adf912cfba48d9d939af7492c9e3a6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:25:14 +0100 Subject: [PATCH 371/409] CollectComment plugin also store comment on each instance --- openpype/plugins/publish/collect_comment.py | 86 ++++++++++++++++++--- 1 file changed, 76 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index a2aef7fc1c..a1b4e1364a 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -1,8 +1,26 @@ -""" -Requires: - None -Provides: - context -> comment (str) +"""Collect comment and add option to enter comment per instance. + +Combination of plugins. One define optional input for instances in Publisher +UI (CollectInstanceCommentDef) and second cares that each instance during +collection has available "comment" key in data (CollectComment). + +Plugin 'CollectInstanceCommentDef' define "comment" attribute which won't be +filled with any value if instance does not match families filter or when +plugin is disabled. + +Plugin 'CollectComment' makes sure that each instance in context has +available "comment" key in data which can be set to 'str' or 'None' if is not +set. +- In case instance already has filled comment the plugin's logic is skipped +- The comment is always set and value should be always 'str' even if is empty + +Why are separated: +- 'CollectInstanceCommentDef' can have specific settings to show comment + attribute only to defined families in publisher UI +- 'CollectComment' will run all the time + +Todos: + The comment per instance is not sent via farm. """ import pyblish.api @@ -31,11 +49,59 @@ class CollectInstanceCommentDef( class CollectComment(pyblish.api.ContextPlugin): - """This plug-ins displays the comment dialog box per default""" + """Collect comment per each instance. - label = "Collect Comment" - order = pyblish.api.CollectorOrder + Plugin makes sure each instance to publish has set "comment" in data so any + further plugin can use it directly. 
+ """ + + label = "Collect Instance Comment" + order = pyblish.api.CollectorOrder + 0.49 def process(self, context): - comment = (context.data.get("comment") or "").strip() - context.data["comment"] = comment + context_comment = self.cleanup_comment(context.data.get("comment")) + # Set it back + context.data["comment"] = context_comment + for instance in context: + instance_label = str(instance) + # Check if comment is already set + instance_comment = self.cleanup_comment( + instance.data.get("comment")) + + # If comment on instance is not set then look for attributes + if not instance_comment: + attr_values = self.get_attr_values_from_data_for_plugin( + CollectInstanceCommentDef, instance.data + ) + instance_comment = self.cleanup_comment( + attr_values.get("comment") + ) + + # Use context comment if instance has all options of comment + # empty + if not instance_comment: + instance_comment = context_comment + + instance.data["comment"] = instance_comment + if instance_comment: + msg_end = " has comment set to: \"{}\"".format( + instance_comment) + else: + msg_end = " does not have set comment" + self.log.debug("Instance {} {}".format(instance_label, msg_end)) + + def cleanup_comment(self, comment): + """Cleanup comment value. + + Args: + comment (Union[str, None]): Comment value from data. + + Returns: + str: Cleaned comment which is stripped or empty string if input + was 'None'. + """ + + if comment: + return comment.strip() + return "" + From fd5ac3be1bc975d9e3329c835d583d269ec7c575 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:29:01 +0100 Subject: [PATCH 372/409] added settings for the attribute collector --- openpype/plugins/publish/collect_comment.py | 18 ++++++++++++++-- .../defaults/project_settings/global.json | 4 ++++ .../schemas/schema_global_publish.json | 21 +++++++++++++++++++ 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index a1b4e1364a..db5a04681b 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -35,12 +35,26 @@ class CollectInstanceCommentDef( label = "Comment per instance" targets = ["local"] # Disable plugin by default - families = ["*"] - enabled = True + families = [] + enabled = False def process(self, instance): pass + @classmethod + def apply_settings(cls, project_setting, _): + plugin_settings = project_setting["global"]["publish"].get( + "collect_comment_per_instance" + ) + if not plugin_settings: + return + + if plugin_settings.get("enabled") is not None: + cls.enabled = plugin_settings["enabled"] + + if plugin_settings.get("families") is not None: + cls.families = plugin_settings["families"] + @classmethod def get_attribute_defs(cls): return [ diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 46b8b1b0c8..89d7cf08b7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -24,6 +24,10 @@ ], "skip_hosts_headless_publish": [] }, + "collect_comment_per_instance": { + "enabled": false, + "families": [] + }, "ValidateEditorialAssetName": { "enabled": true, "optional": false diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 742437fbde..f2ada5fd8d 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -60,6 +60,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "collect_comment_per_instance", + "label": "Collect comment per instance", + "checkbox_key": "enabled", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, From 1f05a3952262a342a72e8308643c6d1a7a0ffdba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:31:09 +0100 Subject: [PATCH 373/409] use comment from instance where possible --- openpype/hosts/nuke/plugins/publish/extract_slate_frame.py | 2 +- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 +- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + .../ftrack/plugins/publish/integrate_ftrack_description.py | 2 +- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 2 +- openpype/plugins/publish/extract_burnin.py | 2 +- openpype/plugins/publish/integrate.py | 2 +- openpype/plugins/publish/integrate_legacy.py | 2 +- 8 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index e7197b4fa8..06c086b10d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -298,7 +298,7 @@ class ExtractSlateFrame(publish.Extractor): def add_comment_slate_node(self, instance, node): - comment = instance.context.data.get("comment") + comment = instance.data["comment"] intent = instance.context.data.get("intent") if not isinstance(intent, dict): intent = { diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea44a24459..038ee4fc03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -38,7 +38,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = context.data.get("comment", "") + self._comment = instance.data["comment"] self._deadline_user = context.data.get( "deadlineUser", getpass.getuser()) self._frame_start = int(instance.data["frameStart"]) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 249211e965..45688e8584 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -777,6 +777,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "handleEnd": handle_end, "frameStartHandle": start - handle_start, "frameEndHandle": end + handle_end, + "comment": instance.data["comment"], "fps": fps, "source": source, "extendFrames": data.get("extendFrames"), diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index e7c265988e..6ed02bc8b6 100644 --- 
a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -38,7 +38,7 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): self.log.info("There are any integrated AssetVersions") return - comment = (instance.context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index ac3fa874e0..6776509dda 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,7 +45,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - comment = (context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 4179199317..fd8dfdece9 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -468,7 +468,7 @@ class ExtractBurnin(publish.Extractor): burnin_data.update({ "version": int(version), - "comment": context.data.get("comment") or "" + "comment": instance.data["comment"] }) intent_label = context.data.get("intent") or "" diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 7e4fc84658..57a642c635 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -772,7 +772,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": instance.data.get("fps", context.data.get("fps")) } diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c..670b637faa 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -968,7 +968,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": context.data.get( "fps", instance.data.get("fps") From 5d24bfcf6318fa4fec1267612c933989fa2beb22 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:31:31 +0100 Subject: [PATCH 374/409] commit forgotten change of getting attribute values from plugin --- openpype/pipeline/publish/publish_plugins.py | 26 ++++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 6e2be1ce2c..47dfaf6b98 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +import inspect from abc import ABCMeta import pyblish.api @@ -132,6 +133,25 @@ class OpenPypePyblishPluginMixin: ) return attribute_values + @staticmethod + def get_attr_values_from_data_for_plugin(plugin, data): + """Get attribute values for 
attribute definitions from data. + + Args: + plugin (Union[publish.api.Plugin, Type[publish.api.Plugin]]): The + plugin for which attributes are extracted. + data(dict): Data from instance or context. + """ + + if not inspect.isclass(plugin): + plugin = plugin.__class__ + + return ( + data + .get("publish_attributes", {}) + .get(plugin.__name__, {}) + ) + def get_attr_values_from_data(self, data): """Get attribute values for attribute definitions from data. @@ -139,11 +159,7 @@ class OpenPypePyblishPluginMixin: data(dict): Data from instance or context. """ - return ( - data - .get("publish_attributes", {}) - .get(self.__class__.__name__, {}) - ) + return self.get_attr_values_from_data_for_plugin(self.__class__, data) class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): From e6585be6772006ca748c8fbf5697f981bcd0de12 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:35:03 +0100 Subject: [PATCH 375/409] fix missing method --- openpype/plugins/publish/collect_comment.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index db5a04681b..83609a04bd 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -62,7 +62,10 @@ class CollectInstanceCommentDef( ] -class CollectComment(pyblish.api.ContextPlugin): +class CollectComment( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): """Collect comment per each instance. Plugin makes sure each instance to publish has set "comment" in data so any From 6e520f564bfe58aa23c1430d175d30dccd95eb40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 09:52:36 +0100 Subject: [PATCH 376/409] removed redundant line --- openpype/plugins/publish/collect_comment.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 83609a04bd..12579cd957 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -121,4 +121,3 @@ class CollectComment( if comment: return comment.strip() return "" - From fffe1162b0ab68dbd3ea5e0e234c4b2b6cf51ac8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 10:00:16 +0100 Subject: [PATCH 377/409] fix import --- openpype/plugins/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index db567f8b8f..37a5e90f86 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -7,7 +7,7 @@ from openpype.client import ( get_last_versions, get_representations, ) -from openpype.pipeline import get_representation_path_with_anatomy +from openpype.pipeline.load import get_representation_path_with_anatomy class CollectAudio(pyblish.api.ContextPlugin): From ad1380541ff48ebc495baacfcfd65c83952f81a7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 10:17:03 +0100 Subject: [PATCH 378/409] fix fields query --- openpype/plugins/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 37a5e90f86..3a0ddb3281 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -121,7 +121,7 @@ class CollectAudio(pyblish.api.ContextPlugin): asset_docs = get_assets( 
project_name, asset_names=asset_names, - fields=["_id"] + fields=["_id", "name"] ) asset_id_by_name = {} From 89c5fdfb27c40e1a9797730830ef5ec8e38c4af7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 6 Dec 2022 14:31:00 +0100 Subject: [PATCH 379/409] Fix: Template path wrong normpath for cross platform --- openpype/pipeline/load/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 784d4628f3..bfa9fe07c7 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -555,7 +555,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"] + template = repre_doc["data"]["template"].replace("\\", "/") except KeyError: raise InvalidRepresentationContext(( From 63b47efc51f92c8082ad76a3154a45703e80423c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:02:03 +0100 Subject: [PATCH 380/409] integrate thumbnail looks for thumbnail to multiple places --- .../plugins/publish/integrate_thumbnail.py | 58 +++++++++++++++++-- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f74c3d9609..cd472a7e6b 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -102,8 +102,56 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): thumbnail_root ) + def _get_thumbnail_from_instance(self, instance): + # 1. Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path + + # 2. Look for thumbnail in published representations + published_repres = instance.data.get("published_representations") + path = self._get_thumbnail_path_from_published(published_repres) + if path and os.path.exists(path): + return path + + if path: + self.log.warning( + "Could not find published thumbnail path {}".format(path) + ) + + # 3. Look for thumbnail in "not published" representations + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _prepare_instances(self, context): - context_thumbnail_path = context.get("thumbnailPath") + context_thumbnail_path = context.data.get("thumbnailPath") valid_context_thumbnail = False if context_thumbnail_path and os.path.exists(context_thumbnail_path): valid_context_thumbnail = True @@ -122,8 +170,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): continue # Find thumbnail path on instance - thumbnail_path = self._get_instance_thumbnail_path( - published_repres) + thumbnail_path = self._get_thumbnail_from_instance(instance) if thumbnail_path: self.log.debug(( "Found thumbnail path for instance \"{}\"." 
@@ -157,7 +204,10 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): for repre_info in published_representations.values(): return repre_info["representation"]["parent"] - def _get_instance_thumbnail_path(self, published_representations): + def _get_thumbnail_path_from_published(self, published_representations): + if not published_representations: + return None + thumb_repre_doc = None for repre_info in published_representations.values(): repre_doc = repre_info["representation"] From 21411d50624385122d200b0d0317a54b26d83e50 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:14:32 +0100 Subject: [PATCH 381/409] store thumbnail path to instance data --- openpype/plugins/publish/extract_thumbnail_from_source.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 8da1213807..03df1455e2 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -73,6 +73,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): "Adding thumbnail representation: {}".format(new_repre) ) instance.data["representations"].append(new_repre) + instance.data["thumbnailPath"] = dst_filepath def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: From aa704b40eaa42bd3e4184dd6b754cfcf8f3069f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:14:52 +0100 Subject: [PATCH 382/409] change order of thumbnail path resolving --- .../plugins/publish/integrate_thumbnail.py | 40 ++++++++----------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index cd472a7e6b..f1455dc66b 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -103,12 +103,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ) def _get_thumbnail_from_instance(self, instance): - # 1. Look for thumbnail path on instance in 'thumbnailPath' - thumbnail_path = instance.data.get("thumbnailPath") - if thumbnail_path and os.path.exists(thumbnail_path): - return thumbnail_path - - # 2. Look for thumbnail in published representations + # 1. Look for thumbnail in published representations published_repres = instance.data.get("published_representations") path = self._get_thumbnail_path_from_published(published_repres) if path and os.path.exists(path): @@ -119,34 +114,33 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): "Could not find published thumbnail path {}".format(path) ) - # 3. Look for thumbnail in "not published" representations + # 2. 
Look for thumbnail in "not published" representations repres = instance.data.get("representations") - if not repres: - return None - thumbnail_repre = next( ( repre - for repre in repres + for repre in repres or [] if repre["name"] == "thumbnail" ), None ) - if not thumbnail_repre: - return None + if thumbnail_repre: + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") - staging_dir = thumbnail_repre.get("stagingDir") - if not staging_dir: - staging_dir = instance.data.get("stagingDir") + filename = thumbnail_repre.get("files") + if isinstance(filename, (list, tuple, set)): + filename = filename[0] - filename = thumbnail_repre.get("files") - if not staging_dir or not filename: - return None + if staging_dir and filename: + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path - if isinstance(filename, (list, tuple, set)): - filename = filename[0] - thumbnail_path = os.path.join(staging_dir, filename) - if os.path.exists(thumbnail_path): + # 3. Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): return thumbnail_path return None From 608afc35465bf17d541cc58e8922e36580949787 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:19:04 +0100 Subject: [PATCH 383/409] move unpublished representations logic to separated method --- .../plugins/publish/integrate_thumbnail.py | 57 ++++++++++++------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f1455dc66b..809a1782e0 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -115,28 +115,9 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ) # 2. Look for thumbnail in "not published" representations - repres = instance.data.get("representations") - thumbnail_repre = next( - ( - repre - for repre in repres or [] - if repre["name"] == "thumbnail" - ), - None - ) - if thumbnail_repre: - staging_dir = thumbnail_repre.get("stagingDir") - if not staging_dir: - staging_dir = instance.data.get("stagingDir") - - filename = thumbnail_repre.get("files") - if isinstance(filename, (list, tuple, set)): - filename = filename[0] - - if staging_dir and filename: - thumbnail_path = os.path.join(staging_dir, filename) - if os.path.exists(thumbnail_path): - return thumbnail_path + thumbnail_path = self._get_thumbnail_path_from_unpublished(instance) + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path # 3. 
Look for thumbnail path on instance in 'thumbnailPath' thumbnail_path = instance.data.get("thumbnailPath") @@ -223,6 +204,38 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): return None return os.path.normpath(path) + def _get_thumbnail_path_from_unpublished(self, instance): + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _integrate_thumbnails( self, filtered_instance_items, From c764dc20c641bb6ef58df1c4b29f7490b6417276 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:00:49 +0100 Subject: [PATCH 384/409] normalize paths when added to queue --- openpype/lib/file_transaction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 1626bec6b6..ce7ef100c1 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -66,8 +66,8 @@ class FileTransaction(object): """Add a new file to transfer queue""" opts = {"mode": mode} - src = os.path.abspath(src) - dst = os.path.abspath(dst) + src = os.path.normpath(os.path.abspath(src)) + dst = os.path.normpath(os.path.abspath(dst)) if dst in self._transfers: queued_src = self._transfers[dst][0] From 18a9c5568426f6b67dc23d90742c6ac140e38800 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:02:21 +0100 Subject: [PATCH 385/409] skip if source and destination are the same paths --- openpype/lib/file_transaction.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index ce7ef100c1..4aedc62fb6 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -84,9 +84,11 @@ class FileTransaction(object): self._transfers[dst] = (src, opts) def process(self): - # Backup any existing files - for dst in self._transfers.keys(): + for dst, (src, opts) in self._transfers.items(): + if not os.path.isdir(src) and dst == src: + continue + if os.path.exists(dst): # Backup original file # todo: add timestamp or uuid to ensure unique @@ -98,6 +100,12 @@ class FileTransaction(object): # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): + if not os.path.isdir(src) and dst == src: + self.log.debug( + "Source and destionation are same files {} -> {}".format( + src, dst)) + continue + self._create_folder_for_file(dst) if opts["mode"] == self.MODE_COPY: From 36dcab11c1c54cec6040456de8ec74ee20635111 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:02:52 +0100 Subject: [PATCH 386/409] formatting changes --- openpype/lib/file_transaction.py | 60 ++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 4aedc62fb6..2d706adaef 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -14,9 +14,9 @@ else: class 
FileTransaction(object): - """ + """File transaction with rollback options. - The file transaction is a three step process. + The file transaction is a three-step process. 1) Rename any existing files to a "temporary backup" during `process()` 2) Copy the files to final destination during `process()` @@ -39,14 +39,12 @@ class FileTransaction(object): Warning: Any folders created during the transfer will not be removed. - """ MODE_COPY = 0 MODE_HARDLINK = 1 def __init__(self, log=None): - if log is None: log = logging.getLogger("FileTransaction") @@ -63,7 +61,14 @@ class FileTransaction(object): self._backup_to_original = {} def add(self, src, dst, mode=MODE_COPY): - """Add a new file to transfer queue""" + """Add a new file to transfer queue. + + Args: + src (str): Source path. + dst (str): Destination path. + mode (MODE_COPY, MODE_HARDLINK): Transfer mode. + """ + opts = {"mode": mode} src = os.path.normpath(os.path.abspath(src)) @@ -72,14 +77,15 @@ class FileTransaction(object): if dst in self._transfers: queued_src = self._transfers[dst][0] if src == queued_src: - self.log.debug("File transfer was already " - "in queue: {} -> {}".format(src, dst)) + self.log.debug( + "File transfer was already in queue: {} -> {}".format( + src, dst)) return else: self.log.warning("File transfer in queue replaced..") - self.log.debug("Removed from queue: " - "{} -> {}".format(queued_src, dst)) - self.log.debug("Added to queue: {} -> {}".format(src, dst)) + self.log.debug( + "Removed from queue: {} -> {} replaced by {} -> {}".format( + queued_src, dst, src, dst)) self._transfers[dst] = (src, opts) @@ -94,8 +100,8 @@ class FileTransaction(object): # todo: add timestamp or uuid to ensure unique backup = dst + ".bak" self._backup_to_original[backup] = dst - self.log.debug("Backup existing file: " - "{} -> {}".format(dst, backup)) + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) os.rename(dst, backup) # Copy the files to transfer @@ -112,8 +118,8 @@ class FileTransaction(object): self.log.debug("Copying file ... {} -> {}".format(src, dst)) copyfile(src, dst) elif opts["mode"] == self.MODE_HARDLINK: - self.log.debug("Hardlinking file ... {} -> {}".format(src, - dst)) + self.log.debug("Hardlinking file ... 
{} -> {}".format( + src, dst)) create_hard_link(src, dst) self._transferred.append(dst) @@ -124,23 +130,21 @@ class FileTransaction(object): try: os.remove(backup) except OSError: - self.log.error("Failed to remove backup file: " - "{}".format(backup), - exc_info=True) + self.log.error( + "Failed to remove backup file: {}".format(backup), + exc_info=True) def rollback(self): - errors = 0 - # Rollback any transferred files for path in self._transferred: try: os.remove(path) except OSError: errors += 1 - self.log.error("Failed to rollback created file: " - "{}".format(path), - exc_info=True) + self.log.error( + "Failed to rollback created file: {}".format(path), + exc_info=True) # Rollback the backups for backup, original in self._backup_to_original.items(): @@ -148,13 +152,15 @@ class FileTransaction(object): os.rename(backup, original) except OSError: errors += 1 - self.log.error("Failed to restore original file: " - "{} -> {}".format(backup, original), - exc_info=True) + self.log.error( + "Failed to restore original file: {} -> {}".format( + backup, original), + exc_info=True) if errors: - self.log.error("{} errors occurred during " - "rollback.".format(errors), exc_info=True) + self.log.error( + "{} errors occurred during rollback.".format(errors), + exc_info=True) six.reraise(*sys.exc_info()) @property From ee71a051b6066011fc4cfe8cd261de8fe9081fad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:05:14 +0100 Subject: [PATCH 387/409] removed redundant check of directory --- openpype/lib/file_transaction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 2d706adaef..6f285d73a8 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -92,7 +92,7 @@ class FileTransaction(object): def process(self): # Backup any existing files for dst, (src, opts) in self._transfers.items(): - if not os.path.isdir(src) and dst == src: + if dst == src: continue if os.path.exists(dst): @@ -106,7 +106,7 @@ class FileTransaction(object): # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): - if not os.path.isdir(src) and dst == src: + if dst == src: self.log.debug( "Source and destionation are same files {} -> {}".format( src, dst)) From 9f2cd89e1521bca7af39927d09655867a082456f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:06:52 +0100 Subject: [PATCH 388/409] remove unused variable --- openpype/lib/file_transaction.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 6f285d73a8..f265b8815c 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -91,18 +91,17 @@ class FileTransaction(object): def process(self): # Backup any existing files - for dst, (src, opts) in self._transfers.items(): - if dst == src: + for dst, (src, _) in self._transfers.items(): + if dst == src or not os.path.exists(dst): continue - if os.path.exists(dst): - # Backup original file - # todo: add timestamp or uuid to ensure unique - backup = dst + ".bak" - self._backup_to_original[backup] = dst - self.log.debug( - "Backup existing file: {} -> {}".format(dst, backup)) - os.rename(dst, backup) + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) + 
os.rename(dst, backup) # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): From 2c55ee55c266dbfe90394918e604ed87c51d619e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:09:54 +0100 Subject: [PATCH 389/409] remove source and destination check from integrate --- openpype/plugins/publish/integrate.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 57a642c635..6a85a87129 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -291,9 +291,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - if src == dst: - continue - # todo: add support for hardlink transfers file_transactions.add(src, dst) From 1fc8528795d971d54aa4cebd4e74032828ffd712 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:11:45 +0000 Subject: [PATCH 390/409] Remove redundant infected code --- openpype/hosts/maya/api/lib.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index a415f625c0..b2bbb823aa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3436,28 +3436,3 @@ def iter_visible_nodes_in_range(nodes, start, end): # If no more nodes to process break the frame iterations.. if not node_dependencies: break - - -@contextlib.contextmanager -def selection(*nodes): - """Execute something with a specific Maya selection. - - Example: - .. code-block:: python - - cmds.select('side') - print(cmds.ls(sl=True)) - # ['side'] - - with selection('top', 'lambert1'): - print(cmds.ls) - # ['top', 'lambert1'] - - print(cmds.ls(sl=True)) - # ['side'] - - """ - current = cmds.ls(sl=True) - cmds.select(*nodes, noExpand=True) - yield - cmds.select(current, noExpand=True) \ No newline at end of file From fd6345743c330e7d6f2cd80a7b333851e7c1f75e Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:12:46 +0000 Subject: [PATCH 391/409] Creator settings --- .../hosts/maya/plugins/create/create_ass.py | 48 ++++++--- .../defaults/project_settings/maya.json | 19 +++- .../schemas/schema_maya_create.json | 102 +++++++++++++++++- 3 files changed, 148 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 39f226900a..903a8ef0cf 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from openpype.hosts.maya.api import ( lib, plugin @@ -9,12 +7,27 @@ from maya import cmds class CreateAss(plugin.Creator): - """Arnold Archive""" + """Arnold Scene Source""" name = "ass" - label = "Ass StandIn" + label = "Arnold Scene Source" family = "ass" icon = "cube" + exportSequence = False + expandProcedurals = False + motionBlur = True + motionBlurKeys = 2 + motionBlurLength = 0.5 + maskOptions = False + maskCamera = False + maskLight = False + maskShape = False + maskShader = False + maskOverride = False + maskDriver = False + maskFilter = False + maskColor_manager = False + maskOperator = False def __init__(self, *args, **kwargs): super(CreateAss, self).__init__(*args, **kwargs) @@ -22,16 +35,27 @@ class CreateAss(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - # Vertex colors with the geometry - self.data["exportSequence"] = False + 
self.data["exportSequence"] = self.exportSequence + self.data["expandProcedurals"] = self.expandProcedurals + self.data["motionBlur"] = self.motionBlur + self.data["motionBlurKeys"] = self.motionBlurKeys + self.data["motionBlurLength"] = self.motionBlurLength + + # Masks + self.data["maskOptions"] = self.maskOptions + self.data["maskCamera"] = self.maskCamera + self.data["maskLight"] = self.maskLight + self.data["maskShape"] = self.maskShape + self.data["maskShader"] = self.maskShader + self.data["maskOverride"] = self.maskOverride + self.data["maskDriver"] = self.maskDriver + self.data["maskFilter"] = self.maskFilter + self.data["maskColor_manager"] = self.maskColor_manager + self.data["maskOperator"] = self.maskOperator def process(self): instance = super(CreateAss, self).process() - # data = OrderedDict(**self.data) - - - nodes = list() if (self.options or {}).get("useSelection"): @@ -42,7 +66,3 @@ class CreateAss(plugin.Creator): assContent = cmds.sets(name="content_SET") assProxy = cmds.sets(name="proxy_SET", empty=True) cmds.sets([assContent, assProxy], forceElement=instance) - - # self.log.info(data) - # - # self.data = data diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a..a74f8e5827 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -171,7 +171,22 @@ "enabled": true, "defaults": [ "Main" - ] + ], + "exportSequence": false, + "expandProcedurals": false, + "motionBlur": true, + "motionBlurKeys": 2, + "motionBlurLength": 0.5, + "maskOptions": false, + "maskCamera": false, + "maskLight": false, + "maskShape": false, + "maskShader": false, + "maskOverride": false, + "maskDriver": false, + "maskFilter": false, + "maskColor_manager": false, + "maskOperator": false }, "CreateAssembly": { "enabled": true, @@ -1007,4 +1022,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index bc6520474d..6cf11e4cea 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -200,7 +200,103 @@ } ] }, - + { + "type": "dict", + "collapsible": true, + "key": "CreateAss", + "label": "Create Ass", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "boolean", + "key": "exportSequence", + "label": "Export Sequence" + }, + { + "type": "boolean", + "key": "expandProcedurals", + "label": "Expand Procedurals" + }, + { + "type": "boolean", + "key": "motionBlur", + "label": "Motion Blur" + }, + { + "type": "number", + "key": "motionBlurKeys", + "label": "Motion Blur Keys", + "minimum": 0 + }, + { + "type": "number", + "key": "motionBlurLength", + "label": "Motion Blur Length", + "decimal": 3 + }, + { + "type": "boolean", + "key": "maskOptions", + "label": "Mask Options" + }, + { + "type": "boolean", + "key": "maskCamera", + "label": "Mask Camera" + }, + { + "type": "boolean", + "key": "maskLight", + "label": "Mask Light" + }, + { + "type": "boolean", + "key": "maskShape", + "label": "Mask Shape" + }, + { + "type": "boolean", + "key": "maskShader", + 
"label": "Mask Shader" + }, + { + "type": "boolean", + "key": "maskOverride", + "label": "Mask Override" + }, + { + "type": "boolean", + "key": "maskDriver", + "label": "Mask Driver" + }, + { + "type": "boolean", + "key": "maskFilter", + "label": "Mask Filter" + }, + { + "type": "boolean", + "key": "maskColor_manager", + "label": "Mask Color Manager" + }, + { + "type": "boolean", + "key": "maskOperator", + "label": "Mask Operator" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", @@ -217,10 +313,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAss", - "label": "Create Ass" - }, { "key": "CreateAssembly", "label": "Create Assembly" From 3cf47e25626675ec00d1202fbaf99d1ad4b95030 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:13:12 +0000 Subject: [PATCH 392/409] Collect camera from objectset if present. --- .../hosts/maya/plugins/publish/collect_ass.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 3ce1f2ccf1..69af4c777d 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -8,6 +8,7 @@ import pyblish.api class CollectAssData(pyblish.api.InstancePlugin): """Collect Ass data.""" + # Offset to be after renderable camera collection. order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] @@ -25,7 +26,8 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - assert len(members) == 1, "You have multiple proxy meshes, please only use one" + msg = "You have multiple proxy meshes, please only use one" + assert len(members) == 1, msg instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) @@ -35,4 +37,17 @@ class CollectAssData(pyblish.api.InstancePlugin): group = re.compile(r" \[.*\]") instance.data["label"] = group.sub("", instance.data["label"]) + # Use camera in object set if present else default to render globals + # camera. 
+ cameras = cmds.ls(type="camera", long=True) + renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] + camera = renderable[0] + for node in instance.data["setMembers"]: + camera_shapes = cmds.listRelatives( + node, shapes=True, type="camera" + ) + if camera_shapes: + camera = node + instance.data["camera"] = camera + self.log.debug("data: {}".format(instance.data)) From 76bf9bf4de319182704b57687b9d97480e8b003b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:13:42 +0000 Subject: [PATCH 393/409] Working extractor --- .../hosts/maya/plugins/publish/extract_ass.py | 149 ++++++++++++------ 1 file changed, 102 insertions(+), 47 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 5c21a4ff08..b6bd4a2e22 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,20 +1,17 @@ import os +import contextlib from maya import cmds +import arnold from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection class ExtractAssStandin(publish.Extractor): - """Extract the content of the instance to a ass file + """Extract the content of the instance to a ass file""" - Things to pay attention to: - - If animation is toggled, are the frames correct - - - """ - - label = "Ass Standin (.ass)" + label = "Arnold Scene Source (.ass)" hosts = ["maya"] families = ["ass"] asciiAss = False @@ -28,50 +25,59 @@ class ExtractAssStandin(publish.Extractor): filenames = list() file_path = os.path.join(staging_dir, filename) + kwargs = { + "filename": file_path, + "selected": True, + "asciiAss": self.asciiAss, + "shadowLinks": True, + "lightLinks": True, + "boundingBox": True, + "expandProcedurals": instance.data.get("expandProcedurals", False), + "camera": instance.data["camera"], + "mask": self.get_ass_export_mask(instance) + } + + # Motion blur + motion_blur = instance.data.get("motionBlur", True) + motion_blur_keys = instance.data.get("motionBlurKeys", 2) + motion_blur_length = instance.data.get("motionBlurLength", 0.5) + # Write out .ass file self.log.info("Writing: '%s'" % file_path) - with maintained_selection(): - self.log.info("Writing: {}".format(instance.data["setMembers"])) - cmds.select(instance.data["setMembers"], noExpand=True) + with self.motion_blur_ctx(motion_blur, motion_blur_keys, motion_blur_length): + with maintained_selection(): + self.log.info( + "Writing: {}".format(instance.data["setMembers"]) + ) + cmds.select(instance.data["setMembers"], noExpand=True) - if sequence: - self.log.info("Extracting ass sequence") + if sequence: + self.log.info("Extracting ass sequence") - # Collect the start and end including handles - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 0) + # Collect the start and end including handles + kwargs.update({ + "start": instance.data.get("frameStartHandle", 1), + "end": instance.data.get("frameEndHandle", 1), + "step": instance.data.get("step", 0) + }) - exported_files = cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=self.asciiAss, - shadowLinks=True, - lightLinks=True, - boundingBox=True, - startFrame=start, - endFrame=end, - frameStep=step - ) - for file in exported_files: - filenames.append(os.path.split(file)[1]) - self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - 
cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=False, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) + exported_files = cmds.arnoldExportAss(**kwargs) + + for file in exported_files: + filenames.append(os.path.split(file)[1]) + + self.log.info("Exported: {}".format(filenames)) + else: + self.log.info("Extracting ass") + cmds.arnoldExportAss(**kwargs) + self.log.info("Extracted {}".format(filename)) + filenames = filename + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart" + ] + for key in optionals: + instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -84,9 +90,58 @@ class ExtractAssStandin(publish.Extractor): } if sequence: - representation['frameStart'] = start + representation['frameStart'] = kwargs["start"] instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" % (instance.name, staging_dir)) + + #This should be separated out as library function that takes some + #attributes to modify with values. The function then resets to original + #values. + @contextlib.contextmanager + def motion_blur_ctx(self, force, keys, length): + if not force: + yield + return + + cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") + ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") + clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") + + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) + if keys > 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) + if length >= 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) + + try: + yield + finally: + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) + + #This should be refactored to lib. 
probably just need the node_types directionary + def get_ass_export_mask(self, instance): + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + return mask From 31d14cb70fb19e80b96c01b649e8a2c8ff34953c Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:16:11 +0000 Subject: [PATCH 394/409] Remove redundant lib --- openpype/hosts/maya/api/mtoa.py | 187 -------------------------------- 1 file changed, 187 deletions(-) delete mode 100644 openpype/hosts/maya/api/mtoa.py diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py deleted file mode 100644 index d19fecf6b5..0000000000 --- a/openpype/hosts/maya/api/mtoa.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -"""Library of classes and functions deadling with MtoA functionality.""" -import tempfile -import contextlib - -import clique -import pyblish.api - -from maya import cmds - -from openpype.pipeline import publish -from .viewport import vp2_paused_context -from .lib import selection - - -class _AssExtractor(publish.Extractor): - """Base class for ASS type extractors.""" - - order = pyblish.api.ExtractorOrder + 0.01 - hosts = ["maya"] - - def get_ass_export_mask(self, maya_set): - import arnold # noqa - mask = arnold.AI_NODE_ALL - - ai_masks = {"options": {"value": arnold.AI_NODE_OPTIONS, - "default": False}, - "camera": {"value": arnold.AI_NODE_CAMERA, - "default": False}, - "light": {"value": arnold.AI_NODE_LIGHT, - "default": False}, - "shape": {"value": arnold.AI_NODE_SHAPE, - "default": True}, - "shader": {"value": arnold.AI_NODE_SHADER, - "default": True}, - "override": {"value": arnold.AI_NODE_OVERRIDE, - "default": False}, - "driver": {"value": arnold.AI_NODE_DRIVER, - "default": False}, - "filter": {"value": arnold.AI_NODE_FILTER, - "default": False}, - "color_manager": {"value": arnold.AI_NODE_COLOR_MANAGER, - "default": True}, - "operator": {"value": arnold.AI_NODE_OPERATOR, - "default": True}} - - for mask_name, mask_data in ai_masks.items(): - attr = "inf_ass_export_{}".format(mask_name) - - submask = self.get_set_attr("{}.{}".format(maya_set, attr), - default=mask_data["default"]) - - if not submask: - mask = mask ^ mask_data["value"] - - return mask - - def process(self, instance): - #What is a dry run? - #ass.rr seems like an abstract variable. Needs clarification. 
- dry_run = instance.data.get("ass.rr") - - staging_dir = self.staging_dir(instance) - sequence = instance.data.get("exportSequence", False) - - if not cmds.pluginInfo("mtoa", query=True, loaded=True): - cmds.loadPlugin("mtoa") - - # Export to a temporal path - export_dir = instance.context.data["stagingDir"] - export_path = tempfile.NamedTemporaryFile(suffix=".ass", - dir=export_dir, - delete=False) - - set_ = instance.data["set"] - kwargs = {"shadowLinks": 1, - "lightLinks": 1, - "boundingBox": True, - "selected": True, - "f": export_path.name} - - # Animation - - if sequence: - mask = self.get_ass_export_mask(set_) - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 1.0) - if start is not None: - kwargs["startFrame"] = float(start) - kwargs["endFrame"] = float(end) - kwargs["frameStep"] = float(step) - else: - mask = 44 - - #get/set should be plugin options. - # Generic options - if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), - False): - kwargs["expandProcedurals"] = True - - if self.get_set_attr("{}.inf_ass_fullpath".format(set_), - True): - kwargs["fullPath"] = True - - kwargs["mask"] = mask - - # Motion blur - mb = self.get_set_attr("{}.inf_ass_motion_blur".format(set_), False) - keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) - length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) - - #Targets should already be collected - targets = self.get_targets(instance) - - _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) - _sorted_kwargs = ["{}={!r}".format(x, y) for x, y in _sorted_kwargs] - - if not dry_run: - self.log.debug("Running command: cmds.arnoldExportAss({})" - .format(", ".join(_sorted_kwargs))) - #There should be a context for not updating the viewport from - #pointcache extraction. - with vp2_paused_context(): - with selection(targets): - with self.motion_blur_ctx(mb, keys, length): - result = cmds.arnoldExportAss(**kwargs) - else: - instance.data["assExportKwargs"] = kwargs - start = kwargs.get("startFrame") - end = kwargs.get("endFrame") - result = [] - - range_ = [0] - if start is not None: - range_ = range(int(start), int(end) + 1) - - for i in range_: - #padding amount should be configurable. 3 does not seems - #enough as default. - fp = "{}.{:03d}.ass".format(export_path.name, i) - with open(fp, "w"): - pass - result.append(fp) - - #Whether its a sequence or not, should already have been determined. 
- if len(result) == 1: - filepath = result[0] - else: - collection = clique.assemble(result)[0][0] - filepath = collection.format() - - # Register the file - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'ass', - 'ext': 'ass', - 'files': filepath, - "stagingDir": staging_dir - } - - instance.data["representations"].append(representation) - - @contextlib.contextmanager - def motion_blur_ctx(self, force, keys, length): - if not force: - yield - return - - cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") - ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") - clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") - - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) - if keys > 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) - if length >= 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) - - try: - yield - finally: - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) From b974c675de93610b5dadda038c1e0c59526b3726 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 21:26:33 +0000 Subject: [PATCH 395/409] Refactor - use lib for attribute context - remove mask class method --- .../hosts/maya/plugins/publish/extract_ass.py | 96 +++++++------------ 1 file changed, 37 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index b6bd4a2e22..7fc0cc1b2f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,11 +1,10 @@ import os -import contextlib from maya import cmds import arnold from openpype.pipeline import publish -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.hosts.maya.api.lib import maintained_selection, attribute_values class ExtractAssStandin(publish.Extractor): @@ -25,6 +24,40 @@ class ExtractAssStandin(publish.Extractor): filenames = list() file_path = os.path.join(staging_dir, filename) + # Mask + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + # Motion blur + values = { + "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get( + "motionBlur", True + ), + "defaultArnoldRenderOptions.motion_steps": instance.data.get( + "motionBlurKeys", 2 + ), + "defaultArnoldRenderOptions.motion_frames": instance.data.get( + "motionBlurLength", 0.5 + ) + } + + # Write out .ass file kwargs = { "filename": file_path, "selected": True, @@ -34,17 +67,11 @@ class ExtractAssStandin(publish.Extractor): "boundingBox": True, "expandProcedurals": instance.data.get("expandProcedurals", False), "camera": instance.data["camera"], - "mask": self.get_ass_export_mask(instance) + "mask": mask } - # Motion blur - motion_blur = instance.data.get("motionBlur", True) - motion_blur_keys = 
instance.data.get("motionBlurKeys", 2) - motion_blur_length = instance.data.get("motionBlurLength", 0.5) - - # Write out .ass file self.log.info("Writing: '%s'" % file_path) - with self.motion_blur_ctx(motion_blur, motion_blur_keys, motion_blur_length): + with attribute_values(values): with maintained_selection(): self.log.info( "Writing: {}".format(instance.data["setMembers"]) @@ -96,52 +123,3 @@ class ExtractAssStandin(publish.Extractor): self.log.info("Extracted instance '%s' to: %s" % (instance.name, staging_dir)) - - #This should be separated out as library function that takes some - #attributes to modify with values. The function then resets to original - #values. - @contextlib.contextmanager - def motion_blur_ctx(self, force, keys, length): - if not force: - yield - return - - cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") - ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") - clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") - - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) - if keys > 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) - if length >= 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) - - try: - yield - finally: - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) - - #This should be refactored to lib. probably just need the node_types directionary - def get_ass_export_mask(self, instance): - mask = arnold.AI_NODE_ALL - - node_types = { - "options": arnold.AI_NODE_OPTIONS, - "camera": arnold.AI_NODE_CAMERA, - "light": arnold.AI_NODE_LIGHT, - "shape": arnold.AI_NODE_SHAPE, - "shader": arnold.AI_NODE_SHADER, - "override": arnold.AI_NODE_OVERRIDE, - "driver": arnold.AI_NODE_DRIVER, - "filter": arnold.AI_NODE_FILTER, - "color_manager": arnold.AI_NODE_COLOR_MANAGER, - "operator": arnold.AI_NODE_OPERATOR - } - - for key in node_types.keys(): - if instance.data.get("mask" + key.title()): - mask = mask ^ node_types[key] - - return mask From 1c985ca0015ce4e3161e18a91205a4590401e243 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 6 Dec 2022 23:45:51 +0100 Subject: [PATCH 396/409] :bug: fix publishing of alembics --- openpype/hosts/max/__init__.py | 2 +- openpype/hosts/max/api/__init__.py | 11 +- openpype/hosts/max/api/lib.py | 78 ++++++++++++-- openpype/hosts/max/api/pipeline.py | 9 +- openpype/hosts/max/api/plugin.py | 15 +-- .../max/plugins/publish/collect_workfile.py | 63 +++++++++++ .../max/plugins/publish/extract_pointcache.py | 100 ++++++++++++++++++ .../plugins/publish/validate_scene_saved.py | 19 ++++ 8 files changed, 272 insertions(+), 25 deletions(-) create mode 100644 openpype/hosts/max/plugins/publish/collect_workfile.py create mode 100644 openpype/hosts/max/plugins/publish/extract_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/validate_scene_saved.py diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py index 8da0e0ee42..9a5af8258c 100644 --- a/openpype/hosts/max/__init__.py +++ b/openpype/hosts/max/__init__.py @@ -7,4 +7,4 @@ from .addon import ( __all__ = ( "MaxAddon", "MAX_HOST_DIR", -) \ No newline at end of file +) diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index 503afade73..26190dcfb8 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -2,10 +2,19 @@ """Public API for 3dsmax""" 
from .pipeline import ( - MaxHost + MaxHost, ) +from .lib import( + maintained_selection, + lsattr, + get_all_children +) + __all__ = [ "MaxHost", + "maintained_selection", + "lsattr", + "get_all_children" ] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py index 8a57bb1bf6..9256ca9ac1 100644 --- a/openpype/hosts/max/api/lib.py +++ b/openpype/hosts/max/api/lib.py @@ -1,7 +1,13 @@ # -*- coding: utf-8 -*- """Library of functions useful for 3dsmax pipeline.""" +import json +import six from pymxs import runtime as rt from typing import Union +import contextlib + + +JSON_PREFIX = "JSON::" def imprint(node_name: str, data: dict) -> bool: @@ -10,7 +16,10 @@ def imprint(node_name: str, data: dict) -> bool: return False for k, v in data.items(): - rt.setUserProp(node, k, v) + if isinstance(v, (dict, list)): + rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}') + else: + rt.setUserProp(node, k, v) return True @@ -39,10 +48,13 @@ def lsattr( nodes = [] output_node(root, nodes) - if not value: - return [n for n in nodes if rt.getUserProp(n, attr)] - - return [n for n in nodes if rt.getUserProp(n, attr) == value] + return [ + n for n in nodes + if rt.getUserProp(n, attr) == value + ] if value else [ + n for n in nodes + if rt.getUserProp(n, attr) + ] def read(container) -> dict: @@ -53,12 +65,58 @@ def read(container) -> dict: return data for line in props.split("\r\n"): - key, value = line.split("=") - # if the line cannot be split we can't really parse it - if not key: + try: + key, value = line.split("=") + except ValueError: + # if the line cannot be split we can't really parse it continue - data[key.strip()] = value.strip() - data["instance_node"] = container + value = value.strip() + if isinstance(value.strip(), six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + + data[key.strip()] = value + + data["instance_node"] = container.name return data + + +@contextlib.contextmanager +def maintained_selection(): + previous_selection = rt.getCurrentSelection() + try: + yield + finally: + if previous_selection: + rt.select(previous_selection) + else: + rt.select() + + +def get_all_children(parent, node_type=None): + """Handy function to get all the children of a given node + + Args: + parent (3dsmax Node1): Node to get all children of. + node_type (None, runtime.class): give class to check for + e.g. rt.FFDBox/rt.GeometryClass etc. 
+ + Returns: + list: list of all children of the parent node + """ + def list_children(node): + children = [] + for c in node.Children: + children.append(c) + children = children + list_children(c) + return children + child_list = list_children(parent) + + return ([x for x in child_list if rt.superClassOf(x) == node_type] + if node_type else child_list) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index cef45193c4..4f8271fb7e 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -1,9 +1,7 @@ # -*- coding: utf-8 -*- """Pipeline tools for OpenPype Houdini integration.""" import os -import sys import logging -import contextlib import json @@ -101,12 +99,12 @@ attributes "OpenPypeContext" ( context type: #string ) - + rollout params "OpenPype Parameters" ( editText editTextContext "Context" type: #string ) -) +) """) attr = rt.execute(create_attr_script) @@ -149,6 +147,3 @@ def ls() -> list: for container in sorted(containers, key=lambda name: container.name): yield lib.read(container) - - - diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py index 0f01c94ce1..4788bfd383 100644 --- a/openpype/hosts/max/api/plugin.py +++ b/openpype/hosts/max/api/plugin.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- """3dsmax specific Avalon/Pyblish plugin definitions.""" -import sys from pymxs import runtime as rt import six from abc import ABCMeta @@ -25,12 +24,12 @@ class MaxCreatorBase(object): shared_data["max_cached_subsets"] = {} cached_instances = lsattr("id", "pyblish.avalon.instance") for i in cached_instances: - creator_id = i.get("creator_identifier") + creator_id = rt.getUserProp(i, "creator_identifier") if creator_id not in shared_data["max_cached_subsets"]: - shared_data["houdini_cached_subsets"][creator_id] = [i] + shared_data["max_cached_subsets"][creator_id] = [i.name] else: shared_data[ - "houdini_cached_subsets"][creator_id].append(i) # noqa + "max_cached_subsets"][creator_id].append(i.name) # noqa return shared_data @staticmethod @@ -61,8 +60,12 @@ class MaxCreator(Creator, MaxCreatorBase): instance_data, self ) + for node in self.selected_nodes: + node.Parent = instance_node + self._add_instance_to_context(instance) imprint(instance_node.name, instance.data_to_store()) + return instance def collect_instances(self): @@ -70,7 +73,7 @@ class MaxCreator(Creator, MaxCreatorBase): for instance in self.collection_shared_data[ "max_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing( - read(instance), self + read(rt.getNodeByName(instance)), self ) self._add_instance_to_context(created_instance) @@ -98,7 +101,7 @@ class MaxCreator(Creator, MaxCreatorBase): instance_node = rt.getNodeByName( instance.data.get("instance_node")) if instance_node: - rt.delete(instance_node) + rt.delete(rt.getNodeByName(instance_node)) self._remove_instance_from_context(instance) diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py new file mode 100644 index 0000000000..7112337575 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +"""Collect current work file.""" +import os +import pyblish.api + +from pymxs import runtime as rt +from openpype.pipeline import legacy_io, KnownPublishError + + +class CollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.01 + 
label = "Collect 3dsmax Workfile"
+    hosts = ['max']
+
+    def process(self, context):
+        """Inject the current working file."""
+        folder = rt.maxFilePath
+        file = rt.maxFileName
+        if not folder or not file:
+            self.log.error("Scene is not saved.")
+        current_file = os.path.join(folder, file)
+
+        context.data['currentFile'] = current_file
+
+        filename, ext = os.path.splitext(file)
+
+        task = legacy_io.Session["AVALON_TASK"]
+
+        data = {}
+
+        # create instance
+        instance = context.create_instance(name=filename)
+        subset = 'workfile' + task.capitalize()
+
+        data.update({
+            "subset": subset,
+            "asset": os.getenv("AVALON_ASSET", None),
+            "label": subset,
+            "publish": True,
+            "family": 'workfile',
+            "families": ['workfile'],
+            "setMembers": [current_file],
+            "frameStart": context.data['frameStart'],
+            "frameEnd": context.data['frameEnd'],
+            "handleStart": context.data['handleStart'],
+            "handleEnd": context.data['handleEnd']
+        })
+
+        data['representations'] = [{
+            'name': ext.lstrip("."),
+            'ext': ext.lstrip("."),
+            'files': file,
+            "stagingDir": folder,
+        }]
+
+        instance.data.update(data)
+
+        self.log.info('Collected instance: {}'.format(file))
+        self.log.info('Scene path: {}'.format(current_file))
+        self.log.info('staging Dir: {}'.format(folder))
+        self.log.info('subset: {}'.format(subset))
diff --git a/openpype/hosts/max/plugins/publish/extract_pointcache.py b/openpype/hosts/max/plugins/publish/extract_pointcache.py
new file mode 100644
index 0000000000..904c1656da
--- /dev/null
+++ b/openpype/hosts/max/plugins/publish/extract_pointcache.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+"""
+Export alembic file.
+
+Note:
+    Parameters on AlembicExport (AlembicExport.Parameter):
+
+    ParticleAsMesh (bool): Sets whether particle shapes are exported
+        as meshes.
+    AnimTimeRange (enum): How animation is saved:
+        #CurrentFrame: saves current frame
+        #TimeSlider: saves the active time segments on time slider (default)
+        #StartEnd: saves the range specified by StartFrame and EndFrame
+    StartFrame (int)
+    EndFrame (int)
+    ShapeSuffix (bool): When set to true, appends the string "Shape" to the
+        name of each exported mesh. This property is set to false by default.
+    SamplesPerFrame (int): Sets the number of animation samples per frame.
+    Hidden (bool): When true, export hidden geometry.
+    UVs (bool): When true, export the mesh UV map channel.
+    Normals (bool): When true, export the mesh normals.
+    VertexColors (bool): When true, export the mesh vertex color map 0 and the
+        current vertex color display data when it differs
+    ExtraChannels (bool): When true, export the mesh extra map channels
+        (map channels greater than channel 1)
+    Velocity (bool): When true, export the mesh vertex and particle velocity
+        data.
+    MaterialIDs (bool): When true, export the mesh material ID as
+        Alembic face sets.
+    Visibility (bool): When true, export the node visibility data.
+    LayerName (bool): When true, export the node layer name as an Alembic
+        object property.
+    MaterialName (bool): When true, export the geometry node material name as
+        an Alembic object property
+    ObjectID (bool): When true, export the geometry node g-buffer object ID as
+        an Alembic object property.
+    CustomAttributes (bool): When true, export the node and its modifiers
+        custom attributes into an Alembic object compound property.
+""" +import os +import pyblish.api +from openpype.pipeline import publish +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractAlembic(publish.Extractor): + order = pyblish.api.ExtractorOrder + label = "Extract Pointcache" + hosts = ["max"] + families = ["pointcache", "camera"] + + def process(self, instance): + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + container = instance.data["instance_node"] + + self.log.info("Extracting pointcache ...") + + parent_dir = self.staging_dir(instance) + file_name = "{name}.abc".format(**instance.data) + path = os.path.join(parent_dir, file_name) + + # We run the render + self.log.info("Writing alembic '%s' to '%s'" % (file_name, + parent_dir)) + + abc_export_cmd = ( + f""" +AlembicExport.ArchiveType = #ogawa +AlembicExport.CoordinateSystem = #maya +AlembicExport.StartFrame = {start} +AlembicExport.EndFrame = {end} + +exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport + + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + + with maintained_selection(): + # select and export + + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(abc_export_cmd) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': file_name, + "stagingDir": parent_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py new file mode 100644 index 0000000000..6392b12d11 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from openpype.pipeline.publish import RepairAction +from pymxs import runtime as rt + + +class ValidateSceneSaved(pyblish.api.InstancePlugin): + """Validate that workfile was saved.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["max"] + label = "Validate Workfile is saved" + + def process(self, instance): + if not rt.maxFilePath or not rt.maxFileName: + raise PublishValidationError( + "Workfile is not saved", title=self.label) From d29a3ca4379a88202bc4279fe8966d87a3509820 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:17:21 +0100 Subject: [PATCH 397/409] :art: simple loader for alembics --- .../hosts/max/plugins/load/load_pointcache.py | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 openpype/hosts/max/plugins/load/load_pointcache.py diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py new file mode 100644 index 0000000000..150206b8b8 --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +"""Simple alembic loader for 3dsmax. + +Because of limited api, alembics can be only loaded, but not easily updated. 
+ +""" +import os +from openpype.pipeline import ( + load, + get_representation_path, +) + + +class AbcLoader(load.LoaderPlugin): + """Alembic loader.""" + + families = ["model", "animation", "pointcache"] + label = "Load Alembic" + representations = ["abc"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + file_path = os.path.normpath(self.fname) + + abc_before = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + abc_export_cmd = (f""" +AlembicImport.ImportToRoot = false + +importFile @"{file_path}" #noPrompt + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + rt.execute(abc_export_cmd) + + abc_after = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + # This should yield new AlembicContainer node + abc_containers = abc_after.difference(abc_before) + + if len(abc_containers) != 1: + self.log.error("Something failed when loading.") + + abc_container = abc_containers.pop() + + container_name = f"{name}_CON" + container = rt.container(name=container_name) + abc_container.Parent = container + + return container + + def remove(self, container): + from pymxs import runtime as rt + + node = container["node"] + rt.delete(node) From 7327334226c45fc0291c3b08e041cb8fc7fa328b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:20:56 +0100 Subject: [PATCH 398/409] :rotating_light: fix :dog: --- openpype/hosts/max/api/__init__.py | 2 +- openpype/hosts/max/api/pipeline.py | 6 +----- openpype/hosts/max/plugins/publish/collect_workfile.py | 2 +- openpype/hosts/max/plugins/publish/validate_scene_saved.py | 3 +-- openpype/hosts/max/startup/startup.py | 1 - 5 files changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index 26190dcfb8..92097cc98b 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -6,7 +6,7 @@ from .pipeline import ( ) -from .lib import( +from .lib import ( maintained_selection, lsattr, get_all_children diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index 4f8271fb7e..f3cdf245fb 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -15,11 +15,7 @@ from openpype.pipeline import ( from openpype.hosts.max.api.menu import OpenPypeMenu from openpype.hosts.max.api import lib from openpype.hosts.max import MAX_HOST_DIR -from openpype.pipeline.load import any_outdated_containers -from openpype.lib import ( - register_event_callback, - emit_event, -) + from pymxs import runtime as rt # noqa log = logging.getLogger("openpype.hosts.max") diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py index 7112337575..3500b2735c 100644 --- a/openpype/hosts/max/plugins/publish/collect_workfile.py +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -4,7 +4,7 @@ import os import pyblish.api from pymxs import runtime as rt -from openpype.pipeline import legacy_io, KnownPublishError +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py index 6392b12d11..8506b17315 100644 --- a/openpype/hosts/max/plugins/publish/validate_scene_saved.py +++ 
b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline import PublishValidationError -from openpype.pipeline.publish import RepairAction +from openpype.pipeline import PublishValidationError from pymxs import runtime as rt diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py index afcbd2d132..37bcef5db1 100644 --- a/openpype/hosts/max/startup/startup.py +++ b/openpype/hosts/max/startup/startup.py @@ -4,4 +4,3 @@ from openpype.pipeline import install_host host = MaxHost() install_host(host) - From 75606777695064693dca411bd47455988a669c14 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:23:22 +0100 Subject: [PATCH 399/409] :rotating_light: fix hound round 2 --- openpype/hosts/max/plugins/create/create_pointcache.py | 3 ++- openpype/hosts/max/plugins/load/load_pointcache.py | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py index 4c9ec7fb97..c08b0dedfe 100644 --- a/openpype/hosts/max/plugins/create/create_pointcache.py +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -18,4 +18,5 @@ class CreatePointCache(plugin.MaxCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = rt.getNodeByName(instance.get("instance_node")) + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py index 150206b8b8..285d84b7b6 100644 --- a/openpype/hosts/max/plugins/load/load_pointcache.py +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -6,8 +6,7 @@ Because of limited api, alembics can be only loaded, but not easily updated. 
""" import os from openpype.pipeline import ( - load, - get_representation_path, + load ) From ad95165765bc0841305888af177888bfaf7d1357 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:25:15 +0100 Subject: [PATCH 400/409] :rotating_light: fix hound round 3 --- openpype/hosts/max/plugins/create/create_pointcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py index c08b0dedfe..32f0838471 100644 --- a/openpype/hosts/max/plugins/create/create_pointcache.py +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -11,9 +11,9 @@ class CreatePointCache(plugin.MaxCreator): icon = "gear" def create(self, subset_name, instance_data, pre_create_data): - from pymxs import runtime as rt + # from pymxs import runtime as rt - instance = super(CreatePointCache, self).create( + _ = super(CreatePointCache, self).create( subset_name, instance_data, pre_create_data) # type: CreatedInstance From f4391cbeb2245e132f561cbdc89b8aefc88b06cb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:39:28 +0100 Subject: [PATCH 401/409] :recycle: add 3dsmax 2023 variant --- .../system_settings/applications.json | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index a4db0dd327..b8aa8cec74 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -114,6 +114,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": "{}/app_icons/3dsmax.png", + "host_name": "max", + "environment": { + "ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup" + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "flame": { "enabled": true, "label": "Flame", @@ -1309,35 +1338,6 @@ } } }, - "3dsmax": { - "enabled": true, - "label": "3ds max", - "icon": "{}/app_icons/3dsmax.png", - "host_name": "3dsmax", - "environment": { - - }, - "variants": { - "2023": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "3DSMAX_VERSION": "2023" - } - } - } - }, "djvview": { "enabled": true, "label": "DJV View", From 99930c2856ac882f8029000a6825911d48ac68c6 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 7 Dec 2022 03:30:40 +0000 Subject: [PATCH 402/409] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 5e61ee3a6b..443c76544b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.1" +__version__ = "3.14.9-nightly.2" From d7cc795d1fdc2fd635488bcc5282217e4b36b9c5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 16:59:43 +0800 Subject: [PATCH 403/409] gltf extractor for Maya --- 
openpype/hosts/maya/plugins/publish/collect_gltf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py index bbc4e31f92..bb37fe3a7e 100644 --- a/openpype/hosts/maya/plugins/publish/collect_gltf.py +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -13,5 +13,5 @@ class CollectGLTF(pyblish.api.InstancePlugin): if not instance.data.get("families"): instance.data["families"] = [] - if "fbx" not in instance.data["families"]: + if "gltf" not in instance.data["families"]: instance.data["families"].append("gltf") From 367b7b262ea9131d7e9c184c704b852590dbb887 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Thu, 8 Dec 2022 13:06:10 +0100 Subject: [PATCH 404/409] changes for better support --- openpype/lib/path_templates.py | 2 +- openpype/pipeline/load/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index b160054e38..0f99efb430 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -422,7 +422,7 @@ class TemplateResult(str): cls = self.__class__ return cls( - os.path.normpath(self), + os.path.normpath(self.replace("\\", "/")), self.template, self.solved, self.used_values, diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index bfa9fe07c7..784d4628f3 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -555,7 +555,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"].replace("\\", "/") + template = repre_doc["data"]["template"] except KeyError: raise InvalidRepresentationContext(( From 20400b51c995db486480636783336d00e0a8c162 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 16:11:26 +0100 Subject: [PATCH 405/409] :recycle: remove `exportSequence` flag --- .../hosts/maya/plugins/create/create_ass.py | 4 +-- .../hosts/maya/plugins/publish/collect_ass.py | 13 ++++--- .../hosts/maya/plugins/publish/extract_ass.py | 36 +++++++------------ .../defaults/project_settings/maya.json | 1 - 4 files changed, 19 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 903a8ef0cf..935a068ca5 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -13,7 +13,6 @@ class CreateAss(plugin.Creator): label = "Arnold Scene Source" family = "ass" icon = "cube" - exportSequence = False expandProcedurals = False motionBlur = True motionBlurKeys = 2 @@ -35,7 +34,6 @@ class CreateAss(plugin.Creator): # Add animation data self.data.update(lib.collect_animation_data()) - self.data["exportSequence"] = self.exportSequence self.data["expandProcedurals"] = self.expandProcedurals self.data["motionBlur"] = self.motionBlur self.data["motionBlurKeys"] = self.motionBlurKeys @@ -56,7 +54,7 @@ class CreateAss(plugin.Creator): def process(self): instance = super(CreateAss, self).process() - nodes = list() + nodes = [] if (self.options or {}).get("useSelection"): nodes = cmds.ls(selection=True) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 69af4c777d..45ec5b124e 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,6 +1,7 @@ 
import re from maya import cmds +from openpype.pipeline.publish import KnownPublishError import pyblish.api @@ -26,16 +27,14 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - msg = "You have multiple proxy meshes, please only use one" - assert len(members) == 1, msg + if len(members) != 1: + msg = "You have multiple proxy meshes, please only use one" + raise KnownPublishError(msg) instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) - # Indicate to user that it'll be a single frame. - sequence = instance.data.get("exportSequence", False) - if not sequence: - group = re.compile(r" \[.*\]") - instance.data["label"] = group.sub("", instance.data["label"]) + group = re.compile(r" \[.*\]") + instance.data["label"] = group.sub("", instance.data["label"]) # Use camera in object set if present else default to render globals # camera. diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 7fc0cc1b2f..3442d47ae9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -21,7 +21,7 @@ class ExtractAssStandin(publish.Extractor): staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) - filenames = list() + filenames = [] file_path = os.path.join(staging_dir, filename) # Mask @@ -78,33 +78,21 @@ class ExtractAssStandin(publish.Extractor): ) cmds.select(instance.data["setMembers"], noExpand=True) - if sequence: - self.log.info("Extracting ass sequence") + self.log.info("Extracting ass sequence") - # Collect the start and end including handles - kwargs.update({ - "start": instance.data.get("frameStartHandle", 1), - "end": instance.data.get("frameEndHandle", 1), - "step": instance.data.get("step", 0) - }) + # Collect the start and end including handles + kwargs.update({ + "start": instance.data.get("frameStartHandle", 1), + "end": instance.data.get("frameEndHandle", 1), + "step": instance.data.get("step", 0) + }) - exported_files = cmds.arnoldExportAss(**kwargs) + exported_files = cmds.arnoldExportAss(**kwargs) - for file in exported_files: - filenames.append(os.path.split(file)[1]) + for file in exported_files: + filenames.append(os.path.split(file)[1]) - self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - cmds.arnoldExportAss(**kwargs) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) + self.log.info("Exported: {}".format(filenames)) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index a74f8e5827..0b4ee704de 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -172,7 +172,6 @@ "defaults": [ "Main" ], - "exportSequence": false, "expandProcedurals": false, "motionBlur": true, "motionBlurKeys": 2, From a209140cd6f44ce62beb097f220a7fa3d21d1fa1 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 16:39:51 +0100 Subject: [PATCH 406/409] :bug: handle single frames --- openpype/hosts/maya/plugins/publish/extract_ass.py | 8 +++----- 
.../projects_schema/schemas/schema_maya_create.json | 5 ----- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 3442d47ae9..0678da6549 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -100,13 +100,11 @@ class ExtractAssStandin(publish.Extractor): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filenames, - "stagingDir": staging_dir + 'files': filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + 'frameStart': kwargs["start"] } - if sequence: - representation['frameStart'] = kwargs["start"] - instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 6cf11e4cea..f66b0181de 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -218,11 +218,6 @@ "label": "Default Subsets", "object_type": "text" }, - { - "type": "boolean", - "key": "exportSequence", - "label": "Export Sequence" - }, { "type": "boolean", "key": "expandProcedurals", From db4139fc3774b4f70999e1d06a8fe2491291dd40 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:05 +0000 Subject: [PATCH 407/409] Remove redundant viewport lib --- openpype/hosts/maya/api/viewport.py | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 openpype/hosts/maya/api/viewport.py diff --git a/openpype/hosts/maya/api/viewport.py b/openpype/hosts/maya/api/viewport.py deleted file mode 100644 index cbf78ab815..0000000000 --- a/openpype/hosts/maya/api/viewport.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -"""Tools for working with viewport in Maya.""" -import contextlib -from maya import cmds # noqa - - -@contextlib.contextmanager -def vp2_paused_context(): - """Context manager to stop updating of vp2 viewport.""" - state = cmds.ogs(pause=True, query=True) - - if not state: - cmds.ogs(pause=True) - - try: - yield - finally: - if cmds.ogs(pause=True, query=True) != state: - cmds.ogs(pause=True) From 8d8753b7293969374c26e733efea0452cc1b0048 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:17 +0000 Subject: [PATCH 408/409] Clean up collector --- openpype/hosts/maya/plugins/publish/collect_ass.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 45ec5b124e..b5e05d6665 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,5 +1,3 @@ -import re - from maya import cmds from openpype.pipeline.publish import KnownPublishError @@ -33,9 +31,6 @@ class CollectAssData(pyblish.api.InstancePlugin): instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) - group = re.compile(r" \[.*\]") - instance.data["label"] = group.sub("", instance.data["label"]) - # Use camera in object set if present else default to render globals # camera. 
cameras = cmds.ls(type="camera", long=True) From 096cda17623121aaaad582068b200001b61e471b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:35 +0000 Subject: [PATCH 409/409] Fix frame flags. --- .../hosts/maya/plugins/publish/extract_ass.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 0678da6549..049f256a7a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -16,9 +16,6 @@ class ExtractAssStandin(publish.Extractor): asciiAss = False def process(self, instance): - - sequence = instance.data.get("exportSequence", False) - staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) filenames = [] @@ -60,6 +57,9 @@ class ExtractAssStandin(publish.Extractor): # Write out .ass file kwargs = { "filename": file_path, + "startFrame": instance.data.get("frameStartHandle", 1), + "endFrame": instance.data.get("frameEndHandle", 1), + "frameStep": instance.data.get("step", 1), "selected": True, "asciiAss": self.asciiAss, "shadowLinks": True, @@ -78,14 +78,9 @@ class ExtractAssStandin(publish.Extractor): ) cmds.select(instance.data["setMembers"], noExpand=True) - self.log.info("Extracting ass sequence") - - # Collect the start and end including handles - kwargs.update({ - "start": instance.data.get("frameStartHandle", 1), - "end": instance.data.get("frameEndHandle", 1), - "step": instance.data.get("step", 0) - }) + self.log.info( + "Extracting ass sequence with: {}".format(kwargs) + ) exported_files = cmds.arnoldExportAss(**kwargs) @@ -102,7 +97,7 @@ class ExtractAssStandin(publish.Extractor): 'ext': 'ass', 'files': filenames if len(filenames) > 1 else filenames[0], "stagingDir": staging_dir, - 'frameStart': kwargs["start"] + 'frameStart': kwargs["startFrame"] } instance.data["representations"].append(representation)
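
A minimal sketch, not part of the patch series above, of the corrected frame-range flags that the last commit passes to cmds.arnoldExportAss. It assumes a Maya session with the MtoA (Arnold) plug-in loaded and the nodes to export already selected; the staging path and frame values are placeholders, and only flags that appear in the extractor's kwargs are used.

import os
from maya import cmds

# Placeholder staging directory and file name; ExtractAssStandin derives
# these from the publish instance instead.
staging_dir = "C:/temp/ass_export"
kwargs = {
    "filename": os.path.join(staging_dir, "pointcacheMain.ass"),
    "startFrame": 1001,  # frameStartHandle in the extractor
    "endFrame": 1005,    # frameEndHandle in the extractor
    "frameStep": 1,      # step in the extractor
    "selected": True,
    "asciiAss": False,
    "shadowLinks": True,
}

# arnoldExportAss returns one .ass path per exported frame, which is why the
# extractor collects the file names and publishes them as a sequence
# representation with frameStart taken from kwargs["startFrame"].
exported_files = cmds.arnoldExportAss(**kwargs)
print(exported_files)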