From bff817afd999ddf2536e48340d0bae0ce049b1cf Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 8 Apr 2022 18:59:33 +0200 Subject: [PATCH 001/202] wip on new publisher conversion --- openpype/hosts/houdini/api/lib.py | 18 +++++++++++++ openpype/hosts/houdini/api/plugin.py | 24 +++++++++++++++-- .../hosts/houdini/hooks/set_operators_path.py | 25 ++++++++++++++++++ openpype/hosts/houdini/otls/OpenPype.hda | Bin 0 -> 8238 bytes .../plugins/create/create_pointcache.py | 5 +++- 5 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/houdini/hooks/set_operators_path.py create mode 100644 openpype/hosts/houdini/otls/OpenPype.hda diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index bd41618856..911df31714 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -453,3 +453,21 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def load_creator_code_to_asset( + otl_file_path, node_type_name, source_file_path): + # type: (str, str, str) -> None + # Load the Python source code. + with open(source_file_path, "rb") as src: + source = src.read() + + # Find the asset definition in the otl file. + definitions = [definition + for definition in hou.hda.definitionsInFile(otl_file_path) + if definition.nodeTypeName() == node_type_name] + assert(len(definitions) == 1) + definition = definitions[0] + + # Store the source code into the PythonCook section of the asset. + definition.addSection("PythonCook", source) \ No newline at end of file diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2bbb65aa05..64abfe9ef9 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -2,11 +2,17 @@ """Houdini specific Avalon/Pyblish plugin definitions.""" import sys import six - +from abc import ( + ABCMeta, + abstractmethod, + abstractproperty +) +import six import hou from openpype.pipeline import ( CreatorError, - LegacyCreator + LegacyCreator, + Creator as NewCreator ) from .lib import imprint @@ -84,3 +90,17 @@ class Creator(LegacyCreator): OpenPypeCreatorError, OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator): + _nodes = [] + + def collect_instances(self): + pass + + def update_instances(self, update_list): + pass + + def remove_instances(self, instances): + pass \ No newline at end of file diff --git a/openpype/hosts/houdini/hooks/set_operators_path.py b/openpype/hosts/houdini/hooks/set_operators_path.py new file mode 100644 index 0000000000..6f26baaa78 --- /dev/null +++ b/openpype/hosts/houdini/hooks/set_operators_path.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from openpype.lib import PreLaunchHook +import os + + +class SetOperatorsPath(PreLaunchHook): + """Set path to OpenPype assets folder.""" + + app_groups = ["houdini"] + + def execute(self): + hou_path = self.launch_context.env.get("HOUDINIPATH") + + openpype_assets = os.path.join( + os.getenv("OPENPYPE_REPOS_ROOT"), + "openpype", "hosts", "houdini", "hda" + ) + + if not hou_path: + self.launch_context.env["HOUDINIPATH"] = openpype_assets + return + + self.launch_context.env["HOUDINIPATH"] = "{}{}{}".format( + hou_path, os.pathsep, openpype_assets + ) diff --git a/openpype/hosts/houdini/otls/OpenPype.hda b/openpype/hosts/houdini/otls/OpenPype.hda new file mode 100644 index 
0000000000000000000000000000000000000000..b34418d422b69282353dc134b1c4855e377c1039
GIT binary patch
literal 8238
[... base85-encoded binary payload for openpype/hosts/houdini/otls/OpenPype.hda (8238 bytes) omitted ...]

literal 0
HcmV?d00001

diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py
index feb683edf6..27112260ad 100644
--- a/openpype/hosts/houdini/plugins/create/create_pointcache.py
+++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py
@@ -1,7 +1,7 @@
 from openpype.hosts.houdini.api import plugin


-class CreatePointCache(plugin.Creator):
+class CreatePointCache(plugin.HoudiniCreator):
     """Alembic ROP to pointcache"""

     name = "pointcache"
@@ -9,6 +9,9 @@ class CreatePointCache(plugin.Creator):
     family = "pointcache"
     icon = "gears"

+    def create(self, subset_name, instance_data, pre_create_data):
+        pass
+
     def __init__(self, *args, **kwargs):
         super(CreatePointCache, self).__init__(*args, **kwargs)
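Note: the Houdini helpers this series keeps building on, imprint() and read() in openpype/hosts/houdini/api/lib.py, store publish metadata as spare parameters on a node and read it back as a plain dict (later patches in this series add an "update" flag and JSON-encoded values). Below is a minimal sketch of that round trip, not part of the patch itself; it assumes a running Houdini session with the hou module available and the OpenPype repository importable, and the node name and metadata values are illustrative only.

import hou

from openpype.hosts.houdini.api.lib import imprint, read

# Create a ROP node to hold the example data (node name is hypothetical).
out = hou.node("/out")
rop = out.createNode("alembic", node_name="pointcacheMain")

# Store publish metadata as spare parameters on the node ...
imprint(rop, {
    "id": "pyblish.avalon.instance",
    "family": "pointcache",
    "subset": "pointcacheMain",
})

# ... and read it back as a plain dict.
print(read(rop))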
From 0ac27ab609a1198255e2fdad846f7be698e0e725 Mon Sep 17 00:00:00 2001
From: Ondrej Samohel
Date: Thu, 12 May 2022 13:19:29 +0200
Subject: [PATCH 002/202] start of integration

---
 openpype/hosts/3dsmax/__init__.py           |  0
 openpype/hosts/3dsmax/api/__init__.py       |  0
 openpype/hosts/3dsmax/plugins/__init__.py   |  0
 openpype/hosts/3dsmax/startup/startup.ms    |  8 ++++
 openpype/hosts/3dsmax/startup/startup.py    |  2 +
 openpype/resources/app_icons/3dsmax.png     | Bin 0 -> 12804 bytes
 .../system_settings/applications.json       | 29 +++++++++++++
 openpype/settings/entities/enum_entity.py   |  1 +
 .../host_settings/schema_3dsmax.json        | 39 ++++++++++++++++++
 .../system_schema/schema_applications.json  |  4 ++
 10 files changed, 83 insertions(+)
 create mode 100644 openpype/hosts/3dsmax/__init__.py
 create mode 100644 openpype/hosts/3dsmax/api/__init__.py
 create mode 100644 openpype/hosts/3dsmax/plugins/__init__.py
 create mode 100644 openpype/hosts/3dsmax/startup/startup.ms
 create mode 100644 openpype/hosts/3dsmax/startup/startup.py
 create mode 100644 openpype/resources/app_icons/3dsmax.png
 create mode 100644 openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json

diff --git a/openpype/hosts/3dsmax/__init__.py b/openpype/hosts/3dsmax/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/3dsmax/api/__init__.py b/openpype/hosts/3dsmax/api/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/3dsmax/plugins/__init__.py b/openpype/hosts/3dsmax/plugins/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/3dsmax/startup/startup.ms b/openpype/hosts/3dsmax/startup/startup.ms
new file mode 100644
index 0000000000..94318afb01
--- /dev/null
+++ b/openpype/hosts/3dsmax/startup/startup.ms
@@ -0,0 +1,8 @@
+-- OpenPype Init Script
+(
+    local sysPath = dotNetClass "System.IO.Path"
+    local sysDir = dotNetClass "System.IO.Directory"
+    local startup = sysPath.Combine (sysPath.GetDirectoryName getSourceFile) "startup.py"
+
+    python.ExecuteFile startup
+)
\ No newline at end of file
diff --git a/openpype/hosts/3dsmax/startup/startup.py b/openpype/hosts/3dsmax/startup/startup.py
new file mode 100644
index 0000000000..dd8c08a6b9
--- /dev/null
+++ b/openpype/hosts/3dsmax/startup/startup.py
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+print("inside python startup")
\ No newline at end of file
diff --git a/openpype/resources/app_icons/3dsmax.png b/openpype/resources/app_icons/3dsmax.png
new file mode 100644
index 0000000000000000000000000000000000000000..9ebdf6099f6ac279ebaeccdf21a658a34da5e7b6
GIT binary patch
literal 12804
[... base85-encoded binary payload for openpype/resources/app_icons/3dsmax.png (12804 bytes) omitted ...]
literal 0
HcmV?d00001

diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json
index 0fb99a2608..aaecef3494 100644
--- a/openpype/settings/defaults/system_settings/applications.json
+++ b/openpype/settings/defaults/system_settings/applications.json
@@ -1232,6 +1232,35 @@
             }
         }
     },
+    "3dsmax": {
+        "enabled": true,
+        "label": "3ds max",
+        "icon":
"{}/app_icons/3dsmax.png", + "host_name": "3dsmax", + "environment": { + + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "djvview": { "enabled": true, "label": "DJV View", diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index 92a397afba..b6004a3feb 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -154,6 +154,7 @@ class HostsEnumEntity(BaseEnumEntity): """ schema_types = ["hosts-enum"] all_host_names = [ + "3dsmax", "aftereffects", "blender", "celaction", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json new file mode 100644 index 0000000000..f7c57298af --- /dev/null +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json @@ -0,0 +1,39 @@ +{ + "type": "dict", + "key": "3dsmax", + "label": "Autodesk 3ds Max", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "schema_template", + "name": "template_host_unchangables" + }, + { + "key": "environment", + "label": "Environment", + "type": "raw-json" + }, + { + "type": "dict-modifiable", + "key": "variants", + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } + } + ] +} diff --git a/openpype/settings/entities/schemas/system_schema/schema_applications.json b/openpype/settings/entities/schemas/system_schema/schema_applications.json index 20be33320d..36c5811496 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_applications.json +++ b/openpype/settings/entities/schemas/system_schema/schema_applications.json @@ -9,6 +9,10 @@ "type": "schema", "name": "schema_maya" }, + { + "type": "schema", + "name": "schema_3dsmax" + }, { "type": "schema", "name": "schema_flame" From 6067b1effcca66198836b3519c1a2f9b6cd73872 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 16:02:57 +0200 Subject: [PATCH 003/202] :minus: delete avalon-core submodule --- repos/avalon-core | 1 - 1 file changed, 1 deletion(-) delete mode 160000 repos/avalon-core diff --git a/repos/avalon-core b/repos/avalon-core deleted file mode 160000 index 2fa14cea6f..0000000000 --- a/repos/avalon-core +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2fa14cea6f6a9d86eec70bbb96860cbe4c75c8eb From f2a1a11bec47855f1409b6620c618fa3bd89c550 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:41:57 +0200 Subject: [PATCH 004/202] :lipstick: add new publisher menu item --- .../hosts/houdini/startup/MainMenuCommon.xml | 10 ++--- openpype/tools/utils/host_tools.py | 37 +++++++++++++++++++ 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml index abfa3f136e..c08114b71b 100644 --- a/openpype/hosts/houdini/startup/MainMenuCommon.xml +++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml @@ -1,10 +1,10 @@ - + - + - + - 
+ Date: Tue, 30 Aug 2022 18:42:44 +0200 Subject: [PATCH 005/202] :fire: remove workio workio integrated into host addon --- openpype/hosts/houdini/api/workio.py | 57 ---------------------------- 1 file changed, 57 deletions(-) delete mode 100644 openpype/hosts/houdini/api/workio.py diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py deleted file mode 100644 index 5f7efff333..0000000000 --- a/openpype/hosts/houdini/api/workio.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Host API required Work Files tool""" -import os - -import hou - - -def file_extensions(): - return [".hip", ".hiplc", ".hipnc"] - - -def has_unsaved_changes(): - return hou.hipFile.hasUnsavedChanges() - - -def save_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.save(file_name=filepath, - save_to_recent_files=True) - - return filepath - - -def open_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.load(filepath, - suppress_save_prompt=True, - ignore_load_warnings=False) - - return filepath - - -def current_file(): - - current_filepath = hou.hipFile.path() - if (os.path.basename(current_filepath) == "untitled.hip" and - not os.path.exists(current_filepath)): - # By default a new scene in houdini is saved in the current - # working directory as "untitled.hip" so we need to capture - # that and consider it 'not saved' when it's in that state. - return None - - return current_filepath - - -def work_root(session): - work_dir = session["AVALON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - return os.path.join(work_dir, scene_dir) - else: - return work_dir From 2f6a6cfc9a2676d3361e4fc11e0e182de2a4057d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:44:15 +0200 Subject: [PATCH 006/202] :alien: implement creator methods --- openpype/hosts/houdini/api/plugin.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 64abfe9ef9..fc36284a72 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -3,17 +3,17 @@ import sys import six from abc import ( - ABCMeta, - abstractmethod, - abstractproperty + ABCMeta ) import six import hou from openpype.pipeline import ( CreatorError, LegacyCreator, - Creator as NewCreator + Creator as NewCreator, + CreatedInstance ) +from openpype.hosts.houdini.api import list_instances, remove_instance from .lib import imprint @@ -97,10 +97,17 @@ class HoudiniCreator(NewCreator): _nodes = [] def collect_instances(self): - pass + for instance_data in list_instances(): + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) def update_instances(self, update_list): - pass + for created_inst, _changes in update_list: + imprint(created_inst.get("instance_id"), created_inst.data_to_store()) def remove_instances(self, instances): - pass \ No newline at end of file + for instance in instances: + remove_instance(instance) + self._remove_instance_from_context(instance) From 20e25e111bdd41b31415142d3f3fd74460ebbaaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:44:48 +0200 Subject: [PATCH 007/202] :alien: change houdini to host addon --- openpype/hosts/houdini/api/__init__.py | 32 +--- openpype/hosts/houdini/api/lib.py | 52 ++++-- 
openpype/hosts/houdini/api/pipeline.py | 167 +++++++++++------- .../houdini/startup/python2.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.9libs/pythonrc.py | 6 +- 6 files changed, 158 insertions(+), 111 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index fddf7ab98d..f29df021e1 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,24 +1,15 @@ from .pipeline import ( - install, - uninstall, - + HoudiniHost, ls, containerise, + list_instances, + remove_instance ) from .plugin import ( Creator, ) -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root -) - from .lib import ( lsattr, lsattrs, @@ -29,22 +20,15 @@ from .lib import ( __all__ = [ - "install", - "uninstall", + "HoudiniHost", "ls", "containerise", + "list_instances", + "remove_instance", "Creator", - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - # Utility functions "lsattr", "lsattrs", @@ -52,7 +36,3 @@ __all__ = [ "maintained_selection" ] - -# Backwards API compatibility -open = open_file -save = save_file diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ab33fdc3f6..675f3afcb5 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,6 +1,9 @@ +# -*- coding: utf-8 -*- +import sys import uuid import logging from contextlib import contextmanager +import json import six @@ -8,9 +11,11 @@ from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.context_tools import get_current_project_asset - import hou + +self = sys.modules[__name__] +self._parent = None log = logging.getLogger(__name__) @@ -29,23 +34,18 @@ def set_id(node, unique_id, overwrite=False): def get_id(node): - """ - Get the `cbId` attribute of the given node + """Get the `cbId` attribute of the given node. + Args: node (hou.Node): the name of the node to retrieve the attribute from Returns: - str + str: cbId attribute of the node. """ - if node is None: - return - - id = node.parm("id") - if node is None: - return - return id + if node is not None: + return node.parm("id") def generate_ids(nodes, asset_id=None): @@ -325,6 +325,11 @@ def imprint(node, data): label=key, num_components=1, default_value=(value,)) + elif isinstance(value, dict): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(json.dumps(value),)) else: raise TypeError("Unsupported type: %r" % type(value)) @@ -397,8 +402,20 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects - return {parameter.name(): parameter.eval() for - parameter in node.spareParms()} + data = {} + for parameter in node.spareParms(): + value = parameter.eval() + # test if value is json encoded dict + if isinstance(value, six.string_types) and \ + len(value) > 0 and value[0] == "{": + try: + value = json.loads(value) + except json.JSONDecodeError: + # not a json + pass + data[parameter.name()] = value + + return data @contextmanager @@ -477,4 +494,11 @@ def load_creator_code_to_asset( definition = definitions[0] # Store the source code into the PythonCook section of the asset. 
- definition.addSection("PythonCook", source) \ No newline at end of file + definition.addSection("PythonCook", source) + + +def get_main_window(): + """Acquire Houdini's main window""" + if self._parent is None: + self._parent = hou.ui.mainQtWindow() + return self._parent diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 2ae8a4dbf7..b8479a7b25 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -3,7 +3,10 @@ import sys import logging import contextlib -import hou +import hou # noqa + +from openpype.host import HostBase, IWorkfileHost, ILoadHost +from openpype.tools.utils import host_tools import pyblish.api @@ -35,70 +38,96 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -self = sys.modules[__name__] -self._has_been_setup = False -self._parent = None -self._events = dict() +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): + name = "houdini" + def __init__(self): + super(HoudiniHost, self).__init__() + self._op_events = {} + self._has_been_setup = False -def install(): - _register_callbacks() + def install(self): + pyblish.api.register_host("houdini") + pyblish.api.register_host("hython") + pyblish.api.register_host("hpython") - pyblish.api.register_host("houdini") - pyblish.api.register_host("hython") - pyblish.api.register_host("hpython") + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) + log.info("Installing callbacks ... ") + # register_event_callback("init", on_init) + self._register_callbacks() + register_event_callback("before.save", before_save) + register_event_callback("save", on_save) + register_event_callback("open", on_open) + register_event_callback("new", on_new) - log.info("Installing callbacks ... ") - # register_event_callback("init", on_init) - register_event_callback("before.save", before_save) - register_event_callback("save", on_save) - register_event_callback("open", on_open) - register_event_callback("new", on_new) + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled + ) - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) + self._has_been_setup = True + # add houdini vendor packages + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") - self._has_been_setup = True - # add houdini vendor packages - hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") + sys.path.append(hou_pythonpath) - sys.path.append(hou_pythonpath) + # Set asset settings for the empty scene directly after launch of Houdini + # so it initializes into the correct scene FPS, Frame Range, etc. + # todo: make sure this doesn't trigger when opening with last workfile + _set_context_settings() - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile - _set_context_settings() + def has_unsaved_changes(self): + return hou.hipFile.hasUnsavedChanges() + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] -def uninstall(): - """Uninstall Houdini-specific functionality of avalon-core. 
+ def save_workfile(self, dst_path=None): + # Force forwards slashes to avoid segfault + filepath = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=filepath, + save_to_recent_files=True) + return filepath - This function is called automatically on calling `api.uninstall()`. - """ + def open_workfile(self, filepath): + # Force forwards slashes to avoid segfault + filepath = filepath.replace("\\", "/") - pyblish.api.deregister_host("hython") - pyblish.api.deregister_host("hpython") - pyblish.api.deregister_host("houdini") + hou.hipFile.load(filepath, + suppress_save_prompt=True, + ignore_load_warnings=False) + return filepath -def _register_callbacks(): - for event in self._events.copy().values(): - if event is None: - continue + def get_current_workfile(self): + current_filepath = hou.hipFile.path() + if (os.path.basename(current_filepath) == "untitled.hip" and + not os.path.exists(current_filepath)): + # By default a new scene in houdini is saved in the current + # working directory as "untitled.hip" so we need to capture + # that and consider it 'not saved' when it's in that state. + return None - try: - hou.hipFile.removeEventCallback(event) - except RuntimeError as e: - log.info(e) + return current_filepath - self._events[on_file_event_callback] = hou.hipFile.addEventCallback( - on_file_event_callback - ) + def get_containers(self): + return ls() + + def _register_callbacks(self): + for event in self._op_events.copy().values(): + if event is None: + continue + + try: + hou.hipFile.removeEventCallback(event) + except RuntimeError as e: + log.info(e) + + self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback( + on_file_event_callback + ) def on_file_event_callback(event): @@ -112,22 +141,6 @@ def on_file_event_callback(event): emit_event("new") -def get_main_window(): - """Acquire Houdini's main window""" - if self._parent is None: - self._parent = hou.ui.mainQtWindow() - return self._parent - - -def teardown(): - """Remove integration""" - if not self._has_been_setup: - return - - self._has_been_setup = False - print("pyblish: Integration torn down successfully") - - def containerise(name, namespace, nodes, @@ -250,7 +263,7 @@ def on_open(): log.warning("Scene has outdated content.") # Get main window - parent = get_main_window() + parent = lib.get_main_window() if parent is None: log.info("Skipping outdated content pop-up " "because Houdini window can't be found.") @@ -370,3 +383,27 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): instance_node.bypass(not new_value) except hou.PermissionError as exc: log.warning("%s - %s", instance_node.path(), exc) + + +def list_instances(): + """List all publish instances in the scene.""" + return lib.lsattr("id", "pyblish.avalon.instance") + + +def remove_instance(instance): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer instance, + because it might contain valuable data for artist. 
+ + """ + nodes = instance[:] + if not nodes: + return + + # Assume instance node is first node + instance_node = nodes[0] + for parameter in instance_node.spareParms(): + if parameter.name() == "id" and \ + parameter.eval() == "pyblish.avalon.instance": + instance_node.removeSpareParmTuple(parameter) diff --git a/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py b/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python2.7libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() diff --git a/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python3.7libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() diff --git a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py index afadbffd3e..683ea6721c 100644 --- a/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py +++ b/openpype/hosts/houdini/startup/python3.9libs/pythonrc.py @@ -1,10 +1,12 @@ +# -*- coding: utf-8 -*- +"""OpenPype startup script.""" from openpype.pipeline import install_host -from openpype.hosts.houdini import api +from openpype.hosts.houdini.api import HoudiniHost def main(): print("Installing OpenPype ...") - install_host(api) + install_host(HoudiniHost()) main() From 8ce7d45dd9ff120c959e302636134ca29c8a7bb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 30 Aug 2022 18:46:00 +0200 Subject: [PATCH 008/202] :construction: change to new creator style --- .../houdini/plugins/create/create_pointcache.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 27112260ad..052580b56f 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,14 +1,23 @@ +# -*- coding: utf-8 -*- from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import list_instances +from openpype.pipeline import CreatedInstance class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - - name = "pointcache" + identifier = "pointcache" label = "Point Cache" family = "pointcache" icon = "gears" + def collect_instances(self): + for instance_data in list_instances(): + instance = CreatedInstance.from_existing( + instance_data, self + ) + self._add_instance_to_context(instance) + def create(self, subset_name, instance_data, pre_create_data): pass From 1ca386c78d48cb3903499dd1d7adc5d1ac333a69 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 1 Sep 2022 18:46:53 +0200 Subject: [PATCH 
009/202] :bug: add required key variant --- openpype/pipeline/create/context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index eaaed39357..1b2521e4f7 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -435,6 +435,7 @@ class CreatedInstance: if key in data: data.pop(key) + self._data["variant"] = self._data.get("variant") or "" # Stored creator specific attribute values # {key: value} creator_values = copy.deepcopy(orig_creator_attributes) From d2233bc6f8c5c2541ad04c66cafa5e3419c2fbae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 1 Sep 2022 18:47:58 +0200 Subject: [PATCH 010/202] :wrench: new style creator --- openpype/hosts/houdini/api/lib.py | 97 ++++++++++++------- openpype/hosts/houdini/api/pipeline.py | 35 +++++-- openpype/hosts/houdini/api/plugin.py | 61 ++++++++++-- .../plugins/create/create_pointcache.py | 55 ++++------- 4 files changed, 164 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 675f3afcb5..5d99d7f363 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -281,7 +281,7 @@ def render_rop(ropnode): raise RuntimeError("Render failed: {0}".format(exc)) -def imprint(node, data): +def imprint(node, data, update=False): """Store attributes with value on a node Depending on the type of attribute it creates the correct parameter @@ -293,51 +293,50 @@ def imprint(node, data): Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value + update (bool, optional): flag if imprint should update + already existing data or leave them untouched and only + add new. 
Returns: None """ + if not data: + return + + current_parameters = node.spareParms() + current_keys = [p.name() for p in current_parameters] + update_keys = [] parm_group = node.parmTemplateGroup() - parm_folder = hou.FolderParmTemplate("folder", "Extra") + templates = [] for key, value in data.items(): if value is None: continue - if isinstance(value, float): - parm = hou.FloatParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, bool): - parm = hou.ToggleParmTemplate(name=key, - label=key, - default_value=value) - elif isinstance(value, int): - parm = hou.IntParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, six.string_types): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, dict): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(json.dumps(value),)) - else: - raise TypeError("Unsupported type: %r" % type(value)) - - parm_folder.addParmTemplate(parm) - + if key in current_keys: + if not update: + print(f"{key} already exists on {node}") + else: + print(f"replacing {key}") + update_keys.append((key, value)) + continue + parm = parm_to_template(key, value) + # parm.hide(True) + templates.append(parm) + parm_folder.setParmTemplates(templates) parm_group.append(parm_folder) node.setParmTemplateGroup(parm_group) + if update_keys: + parms = node.parmTuplesInFolder(("Extra",)) + for parm in parms: + for key, value in update_keys: + if parm.name() == key: + node.replaceSpareParmTuple( + parm.name(), parm_to_template(key, value)) + def lsattr(attr, value=None, root="/"): """Return nodes that have `attr` @@ -407,9 +406,9 @@ def read(node): value = parameter.eval() # test if value is json encoded dict if isinstance(value, six.string_types) and \ - len(value) > 0 and value[0] == "{": + len(value) > 0 and value.startswith("JSON:::"): try: - value = json.loads(value) + value = json.loads(value.lstrip("JSON:::")) except json.JSONDecodeError: # not a json pass @@ -502,3 +501,35 @@ def get_main_window(): if self._parent is None: self._parent = hou.ui.mainQtWindow() return self._parent + + +def parm_to_template(key, value): + if isinstance(value, float): + parm = hou.FloatParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, bool): + parm = hou.ToggleParmTemplate(name=key, + label=key, + default_value=value) + elif isinstance(value, int): + parm = hou.IntParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, six.string_types): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, (dict, list, tuple)): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=( + "JSON:::" + json.dumps(value),)) + else: + raise TypeError("Unsupported type: %r" % type(value)) + + return parm \ No newline at end of file diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b8479a7b25..6daf942cf0 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -5,8 +5,7 @@ import contextlib import hou # noqa -from openpype.host import HostBase, IWorkfileHost, ILoadHost -from openpype.tools.utils import host_tools +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api @@ -38,7 
+37,7 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create") INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): name = "houdini" def __init__(self): @@ -129,6 +128,16 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost): on_file_event_callback ) + def update_context_data(self, data, changes): + root_node = hou.node("/") + lib.imprint(root_node, data) + + def get_context_data(self): + from pprint import pformat + + self.log.debug(f"----" + pformat(lib.read(hou.node("/")))) + return lib.read(hou.node("/")) + def on_file_event_callback(event): if event == hou.hipFileEventType.AfterLoad: @@ -385,9 +394,15 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): log.warning("%s - %s", instance_node.path(), exc) -def list_instances(): - """List all publish instances in the scene.""" - return lib.lsattr("id", "pyblish.avalon.instance") +def list_instances(creator_id=None): + """List all publish instances in the scene. + + """ + instance_signature = { + "id": "pyblish.avalon.instance", + "identifier": creator_id + } + return lib.lsattrs(instance_signature) def remove_instance(instance): @@ -397,13 +412,15 @@ def remove_instance(instance): because it might contain valuable data for artist. """ - nodes = instance[:] + nodes = instance.get("members") if not nodes: return # Assume instance node is first node - instance_node = nodes[0] + instance_node = hou.node(nodes[0]) + to_delete = None for parameter in instance_node.spareParms(): if parameter.name() == "id" and \ parameter.eval() == "pyblish.avalon.instance": - instance_node.removeSpareParmTuple(parameter) + to_delete = parameter + instance_node.removeSpareParmTuple(to_delete) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index fc36284a72..7120a49e41 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -13,8 +13,9 @@ from openpype.pipeline import ( Creator as NewCreator, CreatedInstance ) +from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint +from .lib import imprint, read class OpenPypeCreatorError(CreatorError): @@ -96,18 +97,64 @@ class Creator(LegacyCreator): class HoudiniCreator(NewCreator): _nodes = [] - def collect_instances(self): - for instance_data in list_instances(): - instance = CreatedInstance.from_existing( - instance_data, self - ) + def create(self, subset_name, instance_data, pre_create_data): + try: + if pre_create_data.get("use_selection"): + self._nodes = hou.selectedNodes() + + # Get the node type and remove it from the data, not needed + node_type = instance_data.pop("node_type", None) + if node_type is None: + node_type = "geometry" + + # Get out node + out = hou.node("/out") + instance_node = out.createNode( + node_type, node_name=subset_name) + instance_node.moveToGoodPosition() + instance_data["members"] = [instance_node.path()] + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) self._add_instance_to_context(instance) + imprint(instance_node, instance.data_to_store()) + return instance + + except hou.Error as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def collect_instances(self): + for instance in list_instances(creator_id=self.identifier): + created_instance = CreatedInstance.from_existing( + 
read(instance), self + ) + self._add_instance_to_context(created_instance) def update_instances(self, update_list): for created_inst, _changes in update_list: - imprint(created_inst.get("instance_id"), created_inst.data_to_store()) + instance_node = hou.node(created_inst.get("members")[0]) + current_data = read(instance_node) + + imprint( + instance_node, + { + key: value[1] for key, value in _changes.items() + if current_data.get(key) != value[1] + }, + update=True + ) def remove_instances(self, instances): for instance in instances: remove_instance(instance) self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 052580b56f..686dbaa7ab 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api import list_instances from openpype.pipeline import CreatedInstance +import hou + class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" @@ -11,50 +12,34 @@ class CreatePointCache(plugin.HoudiniCreator): family = "pointcache" icon = "gears" - def collect_instances(self): - for instance_data in list_instances(): - instance = CreatedInstance.from_existing( - instance_data, self - ) - self._add_instance_to_context(instance) - def create(self, subset_name, instance_data, pre_create_data): - pass + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - def __init__(self, *args, **kwargs): - super(CreatePointCache, self).__init__(*args, **kwargs) + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) - - self.data.update({"node_type": "alembic"}) - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("members")[0]) parms = { - "use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output + "use_sop_path": True, + "build_from_path": True, + "path_attrib": "path", "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "facesets": 0, # No face sets (by default exclude them) - "filename": "$HIP/pyblish/%s.abc" % self.name, + "format": 2, + "facesets": 0, + "filename": "$HIP/pyblish/{}.abc".format(self.identifier) } - if self.nodes: - node = self.nodes[0] - parms.update({"sop_path": node.path()}) + if instance_node: + parms["sop_path"] = instance_node.path() - instance.setParms(parms) - instance.parm("trange").set(1) + instance_node.setParms(parms) + instance_node.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) From e189b21e543bf0480d0dba31dd18c2b2107104c6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:55:05 +0200 Subject: [PATCH 011/202] :bug: set AttributeValues as new style class --- openpype/pipeline/create/context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 1b2521e4f7..2962f43443 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -85,7 +85,7 @@ class InstanceMember: }) -class AttributeValues: +class AttributeValues(object): """Container which keep values of Attribute definitions. Goal is to have one object which hold values of attribute definitions for From 13dd125e2677bda06f5afe21971a4e9893b01b5a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:55:37 +0200 Subject: [PATCH 012/202] :rotating_light: remove debug prints --- openpype/hosts/houdini/api/pipeline.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 6daf942cf0..92761b7b4e 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" import os import sys import logging @@ -72,9 +74,11 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): sys.path.append(hou_pythonpath) - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile + # Set asset settings for the empty scene directly after launch of + # Houdini so it initializes into the correct scene FPS, + # Frame Range, etc. + # TODO: make sure this doesn't trigger when + # opening with last workfile. 
_set_context_settings() def has_unsaved_changes(self): @@ -133,9 +137,6 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): lib.imprint(root_node, data) def get_context_data(self): - from pprint import pformat - - self.log.debug(f"----" + pformat(lib.read(hou.node("/")))) return lib.read(hou.node("/")) From f09cd22e7ce6b8546f8a74f7b847edc2bf63eef5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:56:06 +0200 Subject: [PATCH 013/202] :recycle: remove unused import --- openpype/hosts/houdini/api/plugin.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 7120a49e41..ff747085da 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" import sys -import six from abc import ( ABCMeta ) From c0263462663f2d099a1db47850152fe7b6ee1791 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:56:40 +0200 Subject: [PATCH 014/202] :bug: set output name to subset name --- openpype/hosts/houdini/plugins/create/create_pointcache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 686dbaa7ab..3365e25091 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -29,7 +29,7 @@ class CreatePointCache(plugin.HoudiniCreator): "prim_to_detail_pattern": "cbId", "format": 2, "facesets": 0, - "filename": "$HIP/pyblish/{}.abc".format(self.identifier) + "filename": "$HIP/pyblish/{}.abc".format(subset_name) } if instance_node: From 27d131f0eea1dfb74b750a0a6a1cc622d152b2ca Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Sep 2022 15:57:16 +0200 Subject: [PATCH 015/202] :recycle: optimize imprint function --- openpype/hosts/houdini/api/lib.py | 85 +++++++++++++++---------------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 5d99d7f363..f438944b09 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -17,7 +17,7 @@ import hou self = sys.modules[__name__] self._parent = None log = logging.getLogger(__name__) - +JSON_PREFIX = "JSON:::" def get_asset_fps(): """Return current asset fps.""" @@ -290,6 +290,11 @@ def imprint(node, data, update=False): http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html + Because of some update glitch where you cannot overwrite existing + ParmTemplates on node using: + `setParmTemplates()` and `parmTuplesInFolder()` + update is done in another pass. 
+ Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value @@ -304,38 +309,48 @@ def imprint(node, data, update=False): if not data: return - current_parameters = node.spareParms() - current_keys = [p.name() for p in current_parameters] - update_keys = [] - - parm_group = node.parmTemplateGroup() - parm_folder = hou.FolderParmTemplate("folder", "Extra") + current_parms = {p.name(): p for p in node.spareParms()} + update_parms = [] templates = [] + for key, value in data.items(): if value is None: continue - if key in current_keys: + parm = get_template_from_value(key, value) + + if key in current_parms.keys(): if not update: - print(f"{key} already exists on {node}") + log.debug("{} already exists on {}".format(key, node)) else: - print(f"replacing {key}") - update_keys.append((key, value)) + log.debug("replacing {}".format(key)) + update_parms.append(parm) continue - parm = parm_to_template(key, value) # parm.hide(True) templates.append(parm) - parm_folder.setParmTemplates(templates) - parm_group.append(parm_folder) + + parm_group = node.parmTemplateGroup() + parm_folder = parm_group.findFolder("Extra") + + # if folder doesn't exist yet, create one and append to it, + # else append to existing one + if not parm_folder: + parm_folder = hou.FolderParmTemplate("folder", "Extra") + parm_folder.setParmTemplates(templates) + parm_group.append(parm_folder) + else: + for template in templates: + parm_group.appendToFolder(parm_folder, template) + node.setParmTemplateGroup(parm_group) - if update_keys: - parms = node.parmTuplesInFolder(("Extra",)) - for parm in parms: - for key, value in update_keys: - if parm.name() == key: - node.replaceSpareParmTuple( - parm.name(), parm_to_template(key, value)) + # TODO: Updating is done here, by calling probably deprecated functions. + # This needs to be addressed in the future. + if not update_parms: + return + + for parm in update_parms: + node.replaceSpareParmTuple(parm.name(), parm) def lsattr(attr, value=None, root="/"): @@ -406,9 +421,9 @@ def read(node): value = parameter.eval() # test if value is json encoded dict if isinstance(value, six.string_types) and \ - len(value) > 0 and value.startswith("JSON:::"): + value.startswith(JSON_PREFIX): try: - value = json.loads(value.lstrip("JSON:::")) + value = json.loads(value[len(JSON_PREFIX):]) except json.JSONDecodeError: # not a json pass @@ -478,24 +493,6 @@ def reset_framerange(): hou.setFrame(frame_start) -def load_creator_code_to_asset( - otl_file_path, node_type_name, source_file_path): - # type: (str, str, str) -> None - # Load the Python source code. - with open(source_file_path, "rb") as src: - source = src.read() - - # Find the asset definition in the otl file. - definitions = [definition - for definition in hou.hda.definitionsInFile(otl_file_path) - if definition.nodeTypeName() == node_type_name] - assert(len(definitions) == 1) - definition = definitions[0] - - # Store the source code into the PythonCook section of the asset. 
- definition.addSection("PythonCook", source) - - def get_main_window(): """Acquire Houdini's main window""" if self._parent is None: @@ -503,7 +500,7 @@ def get_main_window(): return self._parent -def parm_to_template(key, value): +def get_template_from_value(key, value): if isinstance(value, float): parm = hou.FloatParmTemplate(name=key, label=key, @@ -528,8 +525,8 @@ def parm_to_template(key, value): label=key, num_components=1, default_value=( - "JSON:::" + json.dumps(value),)) + JSON_PREFIX + json.dumps(value),)) else: raise TypeError("Unsupported type: %r" % type(value)) - return parm \ No newline at end of file + return parm From fe1a1055c27072a73d45172389b603b69d19d296 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 5 Sep 2022 18:03:38 +0200 Subject: [PATCH 016/202] :bug: store context on dedicated node instead of root node root node doesn't allow storing of spare parameters --- openpype/hosts/houdini/api/pipeline.py | 32 +++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 92761b7b4e..4ff6873ced 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -30,6 +30,7 @@ from .lib import get_asset_fps log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" +CONTEXT_CONTAINER = "/obj/OpenPypeContext" IS_HEADLESS = not hasattr(hou, "ui") PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") @@ -132,12 +133,37 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): on_file_event_callback ) + @staticmethod + def _create_context_node(): + """Helper for creating context holding node. + + Returns: + hou.Node: context node + + """ + obj_network = hou.node("/obj") + op_ctx = obj_network.createNode( + "null", node_name="OpenPypeContext") + op_ctx.moveToGoodPosition() + op_ctx.setBuiltExplicitly(False) + op_ctx.setCreatorState("OpenPype") + op_ctx.setComment("OpenPype node to hold context metadata") + op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) + op_ctx.hide(True) + return op_ctx + def update_context_data(self, data, changes): - root_node = hou.node("/") - lib.imprint(root_node, data) + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self._create_context_node() + + lib.imprint(op_ctx, data) def get_context_data(self): - return lib.read(hou.node("/")) + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self._create_context_node() + return lib.read(op_ctx) def on_file_event_callback(event): From 1a7a52f44cb5dbc07b1fc53c9592c79d6da5156e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 6 Sep 2022 16:40:09 +0200 Subject: [PATCH 017/202] :recycle: members as nodes, change access to members --- .../hosts/houdini/plugins/publish/collect_active_state.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_frames.py | 2 +- openpype/hosts/houdini/plugins/publish/collect_instances.py | 6 ++++++ .../hosts/houdini/plugins/publish/collect_output_node.py | 2 +- .../hosts/houdini/plugins/publish/collect_redshift_rop.py | 2 +- .../houdini/plugins/publish/collect_render_products.py | 2 +- .../hosts/houdini/plugins/publish/collect_usd_layers.py | 4 ++-- openpype/hosts/houdini/plugins/publish/extract_alembic.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_ass.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_composite.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_hda.py | 2 +- 
.../hosts/houdini/plugins/publish/extract_redshift_proxy.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_usd.py | 2 +- .../hosts/houdini/plugins/publish/extract_usd_layered.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py | 2 +- .../plugins/publish/validate_abc_primitive_to_detail.py | 2 +- .../houdini/plugins/publish/validate_alembic_face_sets.py | 2 +- .../houdini/plugins/publish/validate_animation_settings.py | 2 +- openpype/hosts/houdini/plugins/publish/validate_bypass.py | 2 +- .../hosts/houdini/plugins/publish/validate_camera_rop.py | 2 +- .../houdini/plugins/publish/validate_cop_output_node.py | 2 +- .../houdini/plugins/publish/validate_file_extension.py | 2 +- .../hosts/houdini/plugins/publish/validate_frame_token.py | 2 +- .../hosts/houdini/plugins/publish/validate_no_errors.py | 2 +- .../plugins/publish/validate_primitive_hierarchy_paths.py | 2 +- .../houdini/plugins/publish/validate_sop_output_node.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 +- .../houdini/plugins/publish/validate_usd_model_and_shade.py | 2 +- .../houdini/plugins/publish/validate_usd_output_node.py | 2 +- .../hosts/houdini/plugins/publish/validate_usd_setdress.py | 2 +- .../houdini/plugins/publish/validate_usd_shade_workspace.py | 2 +- .../houdini/plugins/publish/validate_vdb_output_node.py | 2 +- 32 files changed, 38 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index 862d5720e1..dd83721358 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -24,7 +24,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance[0] + node = instance.data["members"][0] if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9bd43d8a09..cad894cc3f 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -24,7 +24,7 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index d38927984a..0187a1f1d8 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -47,6 +47,11 @@ class CollectInstances(pyblish.api.ContextPlugin): if node.evalParm("id") != "pyblish.avalon.instance": continue + # instance was created by new creator code, skip it as + # it is already collected. 
+ if node.parm("creator_identifier"): + continue + has_family = node.evalParm("family") assert has_family, "'%s' is missing 'family'" % node.name() @@ -78,6 +83,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] + instance.data["members"] = [node] instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index 0130c0a8da..a3989dc776 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = instance.data["members"][0] # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 72b554b567..33bf74610a 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index d7163b43c0..e88c5ea0e6 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance[0].path() + rop_path = instance.data["members"][0].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index e3985e3c97..c0a55722a5 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -19,7 +19,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance[0] + rop_node = instance.data["members"][0] save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -54,7 +54,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance[0]) # include same USD ROP + layer_inst.append(instance.data["members"][0]) # include same USD ROP layer_inst.append((layer, save_path)) # include layer data # Allow this subset to be grouped into a USD Layer on creation diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 83b790407f..7f1e98c0af 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -14,7 +14,7 @@ class ExtractAlembic(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index e56e40df85..03ca899c5b 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -14,7 +14,7 @@ class ExtractAss(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index f300b6d28d..eb77a91d62 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -15,7 +15,7 @@ class ExtractComposite(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 301dd4e297..4352939a2c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -16,7 +16,7 @@ class ExtractHDA(openpype.api.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance[0] + hda_node = instance.data["members"][0] hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index c754d60c59..b440b1d2ee 100644 --- 
a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -14,7 +14,7 @@ class ExtractRedshiftProxy(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 0fc26900fb..9fa68178f4 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -16,7 +16,7 @@ class ExtractUSD(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 80919c023b..6214e65655 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class ExtractUSDLayered(openpype.api.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance[0] + node = instance.data["members"][0] # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 113e1b0bcb..a30854333e 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -14,7 +14,7 @@ class ExtractVDBCache(openpype.api.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = instance.data["members"][0] # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 3e17d3e8de..b97978d927 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -33,7 +33,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): output = instance.data["output_node"] - rop = instance[0] + rop = instance.data["members"][0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index e9126ffef0..ee59eed35e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -24,7 +24,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 5eb8f93d03..32c5078b9f 100644 --- 
a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -36,7 +36,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = instance.data["members"][0] # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index fc4e18f701..6a37009549 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -34,6 +34,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = instance[0] + rop = instance.data["members"][0] if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index a0919e1323..4433f5712b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -14,7 +14,7 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance[0] + node = instance.data["members"][0] if node.parm("use_sop_path").eval(): raise RuntimeError( "Alembic ROP for Camera export should not be " diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 543539ffe3..86ddc2adf2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -33,7 +33,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index b26d28a1e7..f050a41b88 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -37,7 +37,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): def get_invalid(cls, instance): # Get ROP node from instance - node = instance[0] + node = instance.data["members"][0] # Create lookup for current family in instance families = [] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index 76b5910576..b65e9ef62e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -36,7 +36,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = instance.data["members"][0] # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index f58e5f8d7d..46210bda61 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -37,7 +37,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance[0]) + validate_nodes.append(instance.data["members"][0]) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 1eb36763bb..a0e580fbf0 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -30,7 +30,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): output = instance.data["output_node"] - rop = instance[0] + rop = instance.data["members"][0] build_from_path = rop.parm("build_from_path").eval() if not build_from_path: cls.log.debug( diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a5a07b1b1a..a2a9c1f4ea 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -35,7 +35,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index ac0181aed2..95cad82085 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -24,7 +24,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 2fd2f5eb9f..bdb7c05319 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -37,7 +37,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 1f10fafdf4..0c38ccd4be 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -33,7 +33,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = instance.data["members"][0] cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index fb1094e6b5..835cd5977a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -21,7 +21,7 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): from pxr import UsdGeom - rop = instance[0] + rop = instance.data["members"][0] lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index a77ca2f3cb..c5218c203d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -19,7 +19,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = instance.data["members"][0] workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 1ba840b71d..ac87fa8fed 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -36,7 +36,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." 
% instance[0].path() + "ROP node '%s'." % instance.data["members"][0].path() ) return [instance] From 44518d2d85dcabe808c19b2f24ca64f21d096d90 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Sep 2022 01:55:15 +0200 Subject: [PATCH 018/202] :sparkles: add collector for member nodes --- .../publish/collect_members_as_nodes.py | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py new file mode 100644 index 0000000000..07d71c6605 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +import pyblish.api +import hou + + +class CollectMembersAsNodes(pyblish.api.InstancePlugin): + """Collects instance members as Houdini nodes.""" + + order = pyblish.api.CollectorOrder - 0.01 + hosts = ["houdini"] + label = "Collect Members as Nodes" + + def process(self, instance): + if not instance.data.get("creator_identifier"): + return + + nodes = [ + hou.node(member) for member in instance.data.get("members", []) + ] + + instance.data["members"] = nodes From 31c0e9050b84b015f104ba7d08275563b75dbbc6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Sep 2022 01:55:37 +0200 Subject: [PATCH 019/202] :rotating_light: fix hound :dog: --- .../hosts/houdini/plugins/publish/collect_usd_layers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index c0a55722a5..c21b336403 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -54,8 +54,10 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance.data["members"][0]) # include same USD ROP - layer_inst.append((layer, save_path)) # include layer data + # include same USD ROP + layer_inst.append(instance.data["members"][0]) + # include layer data + layer_inst.append((layer, save_path)) # Allow this subset to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" From 26954b9377639b12fdbf3f67e36b0edf86582018 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:08:19 +0200 Subject: [PATCH 020/202] :recycle: fix name typo and refactor validator error --- .../publish/help/validate_vdb_input_node.xml | 21 +++++++++ .../plugins/publish/valiate_vdb_input_node.py | 47 ------------------- .../publish/validate_vdb_input_node.py | 13 +++-- 3 files changed, 30 insertions(+), 51 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml delete mode 100644 openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. 
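To complement the `CollectMembersAsNodes` plugin added above: the new creators store node *paths* in `instance.data["members"]`, this collector resolves them to `hou.Node` objects, and downstream plugins then read the nodes directly. A minimal, hypothetical downstream plugin (name, label and log text are illustrative) could look like:

```python
import pyblish.api


class CollectExampleRop(pyblish.api.InstancePlugin):
    """Hypothetical plugin showing the member access pattern."""

    # Runs after CollectMembersAsNodes (CollectorOrder - 0.01), so members
    # are already hou.Node objects rather than path strings.
    order = pyblish.api.CollectorOrder
    hosts = ["houdini"]
    label = "Collect Example ROP"

    def process(self, instance):
        rop = instance.data["members"][0]
        self.log.info("ROP node for %s: %s" % (instance.name, rop.path()))
```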
+ + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py deleted file mode 100644 index ac408bc842..0000000000 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ /dev/null @@ -1,47 +0,0 @@ -import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - - -class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB. - - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices - - A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - """ - - order = ValidateContentsOrder + 0.1 - families = ["vdbcache"] - hosts = ["houdini"] - label = "Validate Input Node (VDB)" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" - ) - - @classmethod - def get_invalid(cls, instance): - - node = instance.data["output_node"] - - prims = node.geometry().prims() - nr_of_prims = len(prims) - - nr_of_points = len(node.geometry().points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] - - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index ac408bc842..1f9ccc9c42 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import ( + PublishValidationError +) class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" @@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" 
+ raise PublishValidationError( + self, + "Node connected to the output node is not of type VDB", + title=self.label ) @classmethod From 59c13789e6924a700e269c30bec2d62327acbf09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:08:44 +0200 Subject: [PATCH 021/202] :rotating_light: fix hound --- openpype/hosts/houdini/plugins/publish/collect_instances.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 0187a1f1d8..0582ee154c 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -63,7 +63,8 @@ class CollectInstances(pyblish.api.ContextPlugin): data.update({"active": not node.isBypassed()}) # temporarily translation of `active` to `publish` till issue has - # been resolved, https://github.com/pyblish/pyblish-base/issues/307 + # been resolved. + # https://github.com/pyblish/pyblish-base/issues/307 if "active" in data: data["publish"] = data["active"] From 3b25a68552c6ec1c41f9351bdfcd5bde6626310f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 8 Sep 2022 16:09:09 +0200 Subject: [PATCH 022/202] :recycle: work on validation errors --- .../publish/help/validate_sop_output_node.xml | 21 +++++++++++++++++++ .../publish/validate_sop_output_node.py | 9 +++++--- 2 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml new file mode 100644 index 0000000000..0f92560bf7 --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. + + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a2a9c1f4ea..02b650d48e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishXmlValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -22,9 +24,10 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishXmlValidationError( + self, + message="Output node(s) `%s` are incorrect. 
" % invalid, + title=self.label ) @classmethod From 008479022108e013110c22c1eb95e2e026fb2938 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:14:03 +0200 Subject: [PATCH 023/202] :pencil2: fix typo in import --- openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index cf8d61cda3..81274c670e 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from openyppe.client import get_subset_by_name, get_asset_by_name +from openpype.client import get_subset_by_name, get_asset_by_name from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib From 9e1fb2bc6c979b8a31cf3630af2b5ea76e58a337 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:54:10 +0200 Subject: [PATCH 024/202] :fire: delete validation error help file --- .../publish/help/validate_sop_output_node.xml | 21 ------------------- 1 file changed, 21 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml deleted file mode 100644 index 0f92560bf7..0000000000 --- a/openpype/hosts/houdini/plugins/publish/help/validate_sop_output_node.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - -Scene setting - -## Invalid input node - -VDB input must have the same number of VDBs, points, primitives and vertices as output. - - - -### __Detailed Info__ (optional) - -A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - - \ No newline at end of file From 831050799d6a1b1f0b1a51bcbc16f62fbd39f96c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:54:46 +0200 Subject: [PATCH 025/202] :bug: pass argument in deprecated function --- openpype/host/interfaces.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index cbf12b0d13..03c731d0e4 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -252,7 +252,7 @@ class IWorkfileHost: Remove when all usages are replaced. """ - self.save_workfile() + self.save_workfile(dst_path) def open_file(self, filepath): """Deprecated variant of 'open_workfile'. From e1a504ff3a831f5bd3ee5dd36914239613cb7b7c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Sep 2022 16:55:16 +0200 Subject: [PATCH 026/202] :recycle: refactor to new function calls --- openpype/hosts/houdini/plugins/publish/save_scene.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py index 6128c7af77..d6e07ccab0 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -14,13 +14,13 @@ class SaveCurrentScene(pyblish.api.ContextPlugin): # Filename must not have changed since collecting host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() assert context.data['currentFile'] == current_file, ( "Collected filename from current scene name." 
) if host.has_unsaved_changes(): - self.log.info("Saving current file..") - host.save_file(current_file) + self.log.info("Saving current file {}...".format(current_file)) + host.save_workfile(current_file) else: self.log.debug("No unsaved changes, skipping file save..") From 3501d0d23a78fbaef106da2fffe946cb49bef855 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:36:43 +0200 Subject: [PATCH 027/202] :wastebasket: move deprecation marks from comments to docstrings --- openpype/action.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/openpype/action.py b/openpype/action.py index de9cdee010..15c96404b6 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context): return get_errored_plugins_from_context(context) -# 'RepairAction' and 'RepairContextAction' were moved to -# 'openpype.pipeline.publish' please change you imports. -# There is no "reasonable" way hot mark these classes as deprecated to show -# warning of wrong import. -# Deprecated since 3.14.* will be removed in 3.16.* class RepairAction(pyblish.api.Action): """Repairs the action To process the repairing this requires a static `repair(instance)` method is available on the plugin. + Deprecated: + 'RepairAction' and 'RepairContextAction' were moved to + 'openpype.pipeline.publish' please change you imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in @@ -103,13 +105,19 @@ class RepairAction(pyblish.api.Action): plugin.repair(instance) -# Deprecated since 3.14.* will be removed in 3.16.* class RepairContextAction(pyblish.api.Action): """Repairs the action To process the repairing this requires a static `repair(instance)` method is available on the plugin. + Deprecated: + 'RepairAction' and 'RepairContextAction' were moved to + 'openpype.pipeline.publish' please change you imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. 
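The deprecation docstrings above only point at the new module; assuming the module layout shown in this patch series, updating an import is a one-line change (both classes are named in the docstring as having moved):

```python
# Old location (still importable, but deprecated per the docstring above):
from openpype.action import RepairAction, RepairContextAction

# New location, as used by validate_workfile_paths.py later in this series:
from openpype.pipeline.publish import RepairAction, RepairContextAction
```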
Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in From d59e188ab003d56d6ce8a71947f973b4a732ea01 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:37:27 +0200 Subject: [PATCH 028/202] :recycle: add instance_node as separate parameter --- openpype/hosts/houdini/api/plugin.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ff747085da..f300496a43 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -111,7 +111,12 @@ class HoudiniCreator(NewCreator): instance_node = out.createNode( node_type, node_name=subset_name) instance_node.moveToGoodPosition() + + # wondering if we'll ever need more than one member here + # in Houdini instance_data["members"] = [instance_node.path()] + instance_data["instance_node"] = instance_node.path() + instance = CreatedInstance( self.family, subset_name, @@ -136,7 +141,7 @@ class HoudiniCreator(NewCreator): def update_instances(self, update_list): for created_inst, _changes in update_list: - instance_node = hou.node(created_inst.get("members")[0]) + instance_node = hou.node(created_inst.get("instance_node")) current_data = read(instance_node) imprint( From 42c6c846e479c344b6021101a5aa5d744372447a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:38:05 +0200 Subject: [PATCH 029/202] :alien: change error handling --- .../validate_abc_primitive_to_detail.py | 31 +++++++----- .../publish/validate_alembic_input_node.py | 27 +++++++---- .../plugins/publish/validate_camera_rop.py | 47 +++++++++++++------ .../validate_primitive_hierarchy_paths.py | 26 ++++++---- .../publish/validate_sop_output_node.py | 11 ++--- .../publish/validate_workfile_paths.py | 19 ++++++-- 6 files changed, 109 insertions(+), 52 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 40949b7042..55c705c65b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from collections import defaultdict - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" @@ -24,15 +24,24 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitives found with inconsistent primitive " - "to detail attributes. See log." + raise PublishValidationError( + ("Primitives found with inconsistent primitive " + "to detail attributes. See log."), + title=self.label ) @classmethod def get_invalid(cls, instance): - output = instance.data["output_node"] + output_node = instance.data.get("output_node") + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] rop = instance.data["members"][0] pattern = rop.parm("prim_to_detail_pattern").eval().strip() @@ -67,7 +76,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the start frame then it might be # something that is emitted over time. As such we can't actually @@ -86,7 +95,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -94,7 +103,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = None for attr in pattern.split(" "): @@ -130,4 +139,4 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): "Path has multiple values: %s (path: %s)" % (list(values), path) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 2625ae5f83..aa572dc3bb 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,5 @@ import pyblish.api - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -12,7 +11,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" @@ -20,18 +19,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitive types found that are not supported" - "for Alembic output." + raise PublishValidationError( + ("Primitive types found that are not supported" + "for Alembic output."), + title=self.label ) @classmethod def get_invalid(cls, instance): invalid_prim_types = ["VDB", "Volume"] - node = instance.data["output_node"] + output_node = instance.data.get("output_node") - if not hasattr(node, "geometry"): + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] + + if not hasattr(output_node, "geometry"): # In the case someone has explicitly set an Object # node instead of a SOP node in Geometry context # then for now we ignore - this allows us to also @@ -40,7 +49,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): return frame = instance.data.get("frameStart", 0) - geo = node.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) invalid = False for prim_type in invalid_prim_types: diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index f97c46ae9d..18fed7fbc4 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- +"""Validator plugin for Houdini Camera ROP settings.""" import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" @@ -14,30 +16,45 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): import hou - node = instance.data["members"][0] + node = hou.node(instance.data.get("instance_node")) if node.parm("use_sop_path").eval(): - raise RuntimeError( - "Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable." + raise PublishValidationError( + ("Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable."), + title=self.label ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() objects = node.parm("objects").eval() - assert root, "Root parameter must be set on Alembic ROP" - assert root.startswith("/"), "Root parameter must start with slash /" - assert objects, "Objects parameter must be set on Alembic ROP" - assert len(objects.split(" ")) == 1, "Must have only a single object." 
+ errors = [] + if not root: + errors.append("Root parameter must be set on Alembic ROP") + if not root.startswith("/"): + errors.append("Root parameter must start with slash /") + if not objects: + errors.append("Objects parameter must be set on Alembic ROP") + if len(objects.split(" ")) != 1: + errors.append("Must have only a single object.") + + if errors: + for error in errors: + self.log.error(error) + raise PublishValidationError( + "Some checks failed, see validator log.", + title=self.label) # Check if the object exists and is a camera path = root + "/" + objects camera = hou.node(path) if not camera: - raise ValueError("Camera path does not exist: %s" % path) + raise PublishValidationError( + "Camera path does not exist: %s" % path, + title=self.label) if camera.type().name() != "cam": - raise ValueError( - "Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name()) - ) + raise PublishValidationError( + ("Object set in Alembic ROP is not a camera: " + "{} (type: {})").format(camera, camera.type().name()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 10100b698e..e1f1dc116e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -19,16 +21,24 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "See log for details. " "Invalid nodes: {0}".format(invalid) + raise PublishValidationError( + "See log for details. " "Invalid nodes: {0}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): - import hou + output_node = instance.data.get("output_node") - output = instance.data["output_node"] + if output_node is None: + node = instance.data["members"][0] + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % node.path() + ) + + return [node.path()] rop = instance.data["members"][0] build_from_path = rop.parm("build_from_path").eval() @@ -52,7 +62,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the current frame then we can't # check whether the path names are correct. 
So we'll just issue a @@ -73,7 +83,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -81,7 +91,7 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = geo.primStringAttribValues(path_attr) # Ensure all primitives are set to a valid path @@ -93,4 +103,4 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): "Prims have no value for attribute `%s` " "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index 02b650d48e..c18ad7a1b7 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline import PublishXmlValidationError +from openpype.pipeline import PublishValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -24,10 +24,9 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise PublishXmlValidationError( - self, - message="Output node(s) `%s` are incorrect. " % invalid, - title=self.label + raise PublishValidationError( + "Output node(s) are incorrect", + title="Invalid output node(s)" ) @classmethod @@ -35,7 +34,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + output_node = instance.data.get("output_node") if output_node is None: node = instance.data["members"][0] diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 79b3e894e5..f7a4c762cc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -2,22 +2,30 @@ import openpype.api import pyblish.api import hou +from openpype.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from openpype.pipeline.publish import RepairAction -class ValidateWorkfilePaths(pyblish.api.InstancePlugin): +class ValidateWorkfilePaths( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate workfile paths so they are absolute.""" order = pyblish.api.ValidatorOrder families = ["workfile"] hosts = ["houdini"] label = "Validate Workfile Paths" - actions = [openpype.api.RepairAction] + actions = [RepairAction] optional = True node_types = ["file", "alembic"] prohibited_vars = ["$HIP", "$JOB"] def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid() self.log.info( "node types to check: {}".format(", ".join(self.node_types))) @@ -29,13 +37,18 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): self.log.error( "{}: {}".format(param.path(), param.unexpandedString())) - raise RuntimeError("Invalid paths found") + raise PublishValidationError( + "Invalid paths found", title=self.label) @classmethod def get_invalid(cls): invalid = [] for param, _ in 
hou.fileReferences(): + # it might return None for some reason + if not param: + continue # skip nodes we are not interested in + cls.log.debug(param) if param.node().type().name() not in cls.node_types: continue From a1377a87d6001acb91429022b14a1db12e3f57a0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:39:17 +0200 Subject: [PATCH 030/202] :construction: dealing with identifiers --- .../plugins/create/create_alembic_camera.py | 42 +++++++++---------- .../plugins/create/create_pointcache.py | 13 +++--- 2 files changed, 27 insertions(+), 28 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index eef86005f5..294c99744b 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,46 +1,44 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateAlembicCamera(plugin.Creator): +class CreateAlembicCamera(plugin.HoudiniCreator): """Single baked camera from Alembic ROP""" - name = "camera" + identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" family = "camera" icon = "camera" - def __init__(self, *args, **kwargs): - super(CreateAlembicCamera, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - # Set node type to create for output - self.data.update({"node_type": "alembic"}) + instance = super(CreateAlembicCamera, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/%s.abc" % self.name, + "filename": "$HIP/pyblish/{}.abc".format(subset_name), "use_sop_path": False, } - if self.nodes: - node = self.nodes[0] - path = node.path() + if self._nodes: + path = self._nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) parms.update({"root": "/" + root, "objects": remainder}) - instance.setParms(parms) + instance_node.setParms(parms) # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
- instance.parm("use_sop_path").lock(True) - instance.parm("trange").set(1) + instance_node.parm("use_sop_path").lock(True) + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 3365e25091..889e27ba51 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,18 +1,19 @@ # -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" from openpype.hosts.houdini.api import plugin from openpype.pipeline import CreatedInstance -import hou - class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - identifier = "pointcache" + identifier = "io.openpype.creators.houdini.pointcache" label = "Point Cache" family = "pointcache" icon = "gears" def create(self, subset_name, instance_data, pre_create_data): + import hou + instance_data.pop("active", None) instance_data.update({"node_type": "alembic"}) @@ -21,7 +22,7 @@ class CreatePointCache(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = hou.node(instance.get("members")[0]) + instance_node = hou.node(instance.get("instance_node")) parms = { "use_sop_path": True, "build_from_path": True, @@ -32,8 +33,8 @@ class CreatePointCache(plugin.HoudiniCreator): "filename": "$HIP/pyblish/{}.abc".format(subset_name) } - if instance_node: - parms["sop_path"] = instance_node.path() + if self._nodes: + parms["sop_path"] = self._nodes[0].path() instance_node.setParms(parms) instance_node.parm("trange").set(1) From dade064eb3f50b6b70aedec4e6d0cd487f7a9a70 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Sep 2022 10:39:30 +0200 Subject: [PATCH 031/202] :construction: solving hda publishing --- .../houdini/plugins/create/create_hda.py | 53 +++++++------------ .../houdini/plugins/publish/extract_hda.py | 2 +- 2 files changed, 21 insertions(+), 34 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b98da8b8bb..b1751d0b6c 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,28 +1,22 @@ # -*- coding: utf-8 -*- -import hou - +"""Creator plugin for creating publishable Houdini Digital Assets.""" from openpype.client import ( get_asset_by_name, get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import lib -from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api import (lib, plugin) -class CreateHDA(plugin.Creator): +class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - name = "hda" + identifier = "hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" maintain_selection = False - def __init__(self, *args, **kwargs): - super(CreateHDA, self).__init__(*args, **kwargs) - self.data.pop("active", None) - def _check_existing(self, subset_name): # type: (str) -> bool """Check if existing subset name versions already exists.""" @@ -40,28 +34,34 @@ class CreateHDA(plugin.Creator): } return subset_name.lower() in existing_subset_names_low - def _process(self, instance): - subset_name = self.data["subset"] - # get selected nodes - out = hou.node("/obj") - self.nodes = hou.selectedNodes() + def create(self, subset_name, instance_data, pre_create_data): + import hou - if (self.options or {}).get("useSelection") and 
self.nodes: - # if we have `use selection` enabled and we have some + instance_data.pop("active", None) + + instance = super(CreateHDA, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + instance_node = hou.node(instance.get("instance_node")) + out = hou.node("/obj") + if self._nodes: + # if we have `use selection` enabled, and we have some # selected nodes ... subnet = out.collapseIntoSubnet( self.nodes, - subnet_name="{}_subnet".format(self.name)) + subnet_name="{}_subnet".format(subset_name)) subnet.moveToGoodPosition() to_hda = subnet else: to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(self.name)) + "subnet", node_name="{}_subnet".format(subset_name)) if not to_hda.type().definition(): # if node type has not its definition, it is not user # created hda. We test if hda can be created from the node. if not to_hda.canCreateDigitalAsset(): - raise Exception( + raise plugin.OpenPypeCreatorError( "cannot create hda from node {}".format(to_hda)) hda_node = to_hda.createDigitalAsset( @@ -78,17 +78,4 @@ class CreateHDA(plugin.Creator): hda_node.setName(subset_name) - # delete node created by Avalon in /out - # this needs to be addressed in future Houdini workflow refactor. - - hou.node("/out/{}".format(subset_name)).destroy() - - try: - lib.imprint(hda_node, self.data) - except hou.OperationFailed: - raise plugin.OpenPypeCreatorError( - ("Cannot set metadata on asset. Might be that it already is " - "OpenPype asset.") - ) - return hda_node diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 4352939a2c..50a7ce2908 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -16,7 +16,7 @@ class ExtractHDA(openpype.api.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance.data["members"][0] + hda_node = instance.data.get("members")[0] hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) From 01c60e6fa777029ce50864d5cae843e24f797fb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 9 Sep 2022 18:40:02 +0200 Subject: [PATCH 032/202] :recycle: rename selected node, instance node creation n method --- openpype/hosts/houdini/api/plugin.py | 32 ++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index f300496a43..8180676ce8 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -94,23 +94,41 @@ class Creator(LegacyCreator): @six.add_metaclass(ABCMeta) class HoudiniCreator(NewCreator): - _nodes = [] + selected_nodes = [] + + def _create_instance_node( + self, node_name, parent, + node_type="geometry"): + # type: (str, str, str) -> hou.Node + """Create node representing instance. + + Arguments: + node_name (str): Name of the new node. + parent (str): Name of the parent node. + node_type (str, optional): Type of the node. + + Returns: + hou.Node: Newly created instance node. 
+ + """ + parent_node = hou.node(parent) + instance_node = parent_node.createNode( + node_type, node_name=node_name) + instance_node.moveToGoodPosition() + return instance_node def create(self, subset_name, instance_data, pre_create_data): try: if pre_create_data.get("use_selection"): - self._nodes = hou.selectedNodes() + self.selected_nodes = hou.selectedNodes() # Get the node type and remove it from the data, not needed node_type = instance_data.pop("node_type", None) if node_type is None: node_type = "geometry" - # Get out node - out = hou.node("/out") - instance_node = out.createNode( - node_type, node_name=subset_name) - instance_node.moveToGoodPosition() + instance_node = self._create_instance_node( + subset_name, "/out", node_type, pre_create_data) # wondering if we'll ever need more than one member here # in Houdini From fc5c07f1ca08021048acc99c24bad1e7656aa378 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 9 Sep 2022 18:40:25 +0200 Subject: [PATCH 033/202] :recycle: selected nodes argument rename --- .../hosts/houdini/plugins/create/create_alembic_camera.py | 4 ++-- openpype/hosts/houdini/plugins/create/create_pointcache.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 294c99744b..483c4205a8 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -29,8 +29,8 @@ class CreateAlembicCamera(plugin.HoudiniCreator): "use_sop_path": False, } - if self._nodes: - path = self._nodes[0].path() + if self.selected_nodes: + path = self.selected_nodes.path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 889e27ba51..239f3ce50b 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -33,8 +33,8 @@ class CreatePointCache(plugin.HoudiniCreator): "filename": "$HIP/pyblish/{}.abc".format(subset_name) } - if self._nodes: - parms["sop_path"] = self._nodes[0].path() + if self.selected_nodes: + parms["sop_path"] = self.selected_nodes[0].path() instance_node.setParms(parms) instance_node.parm("trange").set(1) From 9b32b4926ce8eb3356c9aea899acf05b0fe77ece Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 9 Sep 2022 18:40:47 +0200 Subject: [PATCH 034/202] :construction: hda creator refactor --- .../houdini/plugins/create/create_hda.py | 73 ++++++++++--------- 1 file changed, 38 insertions(+), 35 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b1751d0b6c..67e338b1b3 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -34,6 +34,43 @@ class CreateHDA(plugin.HoudiniCreator): } return subset_name.lower() in existing_subset_names_low + def _create_instance_node( + self, node_name, parent, node_type="geometry"): + parent_node = hou.node("/obj") + if self.selected_nodes: + # if we have `use selection` enabled, and we have some + # selected nodes ... 
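The creators converted across these commits all share one shape on top of the refactored `HoudiniCreator`. A condensed, hypothetical sketch of that pattern follows; the identifier, family and output path are placeholders, and it assumes the base-class `create()` from this patch running inside a Houdini session:

# -*- coding: utf-8 -*-
"""Illustrative creator following the new-publisher pattern."""
from openpype.hosts.houdini.api import plugin


class CreateExampleCache(plugin.HoudiniCreator):
    """Geometry ROP to an example cache (illustrative only)."""

    identifier = "io.openpype.creators.houdini.examplecache"
    label = "Example Cache"
    family = "examplecache"
    icon = "gears"

    def create(self, subset_name, instance_data, pre_create_data):
        import hou

        # The ROP's bypass flag is used instead of an "active" key.
        instance_data.pop("active", None)
        # Tell the base class which ROP type to create under /out.
        instance_data.update({"node_type": "geometry"})

        instance = super(CreateExampleCache, self).create(
            subset_name, instance_data, pre_create_data)

        # The base class exposes the created ROP's path on the instance.
        instance_node = hou.node(instance.get("instance_node"))
        parms = {"sopoutput": "$HIP/pyblish/{}.$F4.bgeo".format(subset_name)}

        # Point the ROP at the first selected SOP, if any.
        if self.selected_nodes:
            parms["soppath"] = self.selected_nodes[0].path()

        instance_node.setParms(parms)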
+ subnet = parent_node.collapseIntoSubnet( + self._nodes, + subnet_name="{}_subnet".format(node_name)) + subnet.moveToGoodPosition() + to_hda = subnet + else: + to_hda = parent_node.createNode( + "subnet", node_name="{}_subnet".format(node_name)) + if not to_hda.type().definition(): + # if node type has not its definition, it is not user + # created hda. We test if hda can be created from the node. + if not to_hda.canCreateDigitalAsset(): + raise plugin.OpenPypeCreatorError( + "cannot create hda from node {}".format(to_hda)) + + hda_node = to_hda.createDigitalAsset( + name=node_name, + hda_file_name="$HIP/{}.hda".format(node_name) + ) + hda_node.layoutChildren() + elif self._check_existing(node_name): + raise plugin.OpenPypeCreatorError( + ("subset {} is already published with different HDA" + "definition.").format(node_name)) + else: + hda_node = to_hda + + hda_node.setName(node_name) + return hda_node + + def create(self, subset_name, instance_data, pre_create_data): import hou @@ -44,38 +81,4 @@ class CreateHDA(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = hou.node(instance.get("instance_node")) - out = hou.node("/obj") - if self._nodes: - # if we have `use selection` enabled, and we have some - # selected nodes ... - subnet = out.collapseIntoSubnet( - self.nodes, - subnet_name="{}_subnet".format(subset_name)) - subnet.moveToGoodPosition() - to_hda = subnet - else: - to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(subset_name)) - if not to_hda.type().definition(): - # if node type has not its definition, it is not user - # created hda. We test if hda can be created from the node. - if not to_hda.canCreateDigitalAsset(): - raise plugin.OpenPypeCreatorError( - "cannot create hda from node {}".format(to_hda)) - - hda_node = to_hda.createDigitalAsset( - name=subset_name, - hda_file_name="$HIP/{}.hda".format(subset_name) - ) - hda_node.layoutChildren() - elif self._check_existing(subset_name): - raise plugin.OpenPypeCreatorError( - ("subset {} is already published with different HDA" - "definition.").format(subset_name)) - else: - hda_node = to_hda - - hda_node.setName(subset_name) - - return hda_node + return instance From 4624fb930ff580b1f33c34ec8d3426f7e6fafd4d Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:26:49 +0200 Subject: [PATCH 035/202] :recycle: minor fixes --- .../houdini/plugins/publish/validate_alembic_face_sets.py | 5 ++--- .../houdini/plugins/publish/validate_alembic_input_node.py | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index 7c1d068390..10681e4b72 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,7 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. 
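The validator changes in this and the later commits converge on a matching pattern: plain `pyblish.api.ValidatorOrder` (optionally offset) and `PublishValidationError` in place of bare asserts or `RuntimeError`. A minimal, hypothetical validator in that shape; the family and the check itself are placeholders:

# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateExample(pyblish.api.InstancePlugin):
    """Illustrative validator using the new-publisher error type."""

    # A small offset such as + 0.1 runs a check after the plain
    # ValidatorOrder plug-ins, as the face-set validator in this diff does.
    order = pyblish.api.ValidatorOrder
    families = ["pointcache"]
    hosts = ["houdini"]
    label = "Validate Example"

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            # The raised error carries a human-readable title for the
            # publisher UI, unlike a bare assert or RuntimeError.
            raise PublishValidationError(
                "Invalid nodes found: {}".format(invalid),
                title=self.label)

    @classmethod
    def get_invalid(cls, instance):
        # Placeholder check; real validators inspect instance.data.
        return []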
@@ -18,7 +17,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index aa572dc3bb..4355bc7921 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline import PublishValidationError From 2c59d6317932cd6040b9c77f316112922b850a79 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:27:28 +0200 Subject: [PATCH 036/202] :recycle: change vdb cache creator to new publisher --- .../plugins/create/create_vbd_cache.py | 38 +++++++++---------- .../publish/validate_vdb_output_node.py | 10 +++-- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 242c21fc72..1a5011745f 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,38 +1,36 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating VDB Caches.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateVDBCache(plugin.Creator): +class CreateVDBCache(plugin.HoudiniCreator): """OpenVDB from Geometry ROP""" - + identifier = "io.openpype.creators.houdini.vdbcache" name = "vbdcache" label = "VDB Cache" family = "vdbcache" icon = "cloud" - def __init__(self, *args, **kwargs): - super(CreateVDBCache, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "geometry"}) - # Set node type to create for output - self.data["node_type"] = "geometry" + instance = super(CreateVDBCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name), "initsim": True, "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"soppath": node.path()}) + if self.selected_nodes: + parms["soppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 9be2635a9e..a9f8b38e7e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" @@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" " of type VDB!" + raise PublishValidationError( + "Node connected to the output node is not" " of type VDB!", + title=self.label ) @classmethod From dff7c27562dedda5ce3a1daece04840121b8001a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Sep 2022 01:28:25 +0200 Subject: [PATCH 037/202] :bug: fix function call --- openpype/hosts/houdini/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 8180676ce8..28830bdc64 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -128,7 +128,7 @@ class HoudiniCreator(NewCreator): node_type = "geometry" instance_node = self._create_instance_node( - subset_name, "/out", node_type, pre_create_data) + subset_name, "/out", node_type) # wondering if we'll ever need more than one member here # in Houdini From c5e7d8f93c620abbcc64a6fdcb7a6824558f57f7 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:33:20 +0200 Subject: [PATCH 038/202] :recycle: handle file saving --- openpype/hosts/houdini/api/pipeline.py | 7 +++++++ .../houdini/plugins/publish/increment_current_file.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b9246251a2..4ff24c8004 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -166,6 +166,13 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx = self._create_context_node() return lib.read(op_ctx) + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) + def on_file_event_callback(event): if event == hou.hipFileEventType.AfterLoad: diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c990f481d3..92ac9fbeca 100644 
--- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -27,4 +27,4 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save(new_filepath) + host.save_file(new_filepath) From 99bf89cafae2e94ec927d948811e60e5b15cfb44 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:02 +0200 Subject: [PATCH 039/202] :recycle: handle frame data --- openpype/hosts/houdini/api/lib.py | 27 +++++++++++++++++++ openpype/hosts/houdini/api/plugin.py | 2 +- .../houdini/plugins/publish/collect_frames.py | 2 ++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index f438944b09..d0a3068531 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -530,3 +530,30 @@ def get_template_from_value(key, value): raise TypeError("Unsupported type: %r" % type(value)) return parm + + +def get_frame_data(node): + """Get the frame data: start frame, end frame and steps. + + Args: + node(hou.Node) + + Returns: + dict: frame data for star, end and steps. + + """ + data = {} + + if node.parm("trange") is None: + + return data + + if node.evalParm("trange") == 0: + self.log.debug("trange is 0") + return data + + data["frameStart"] = node.evalParm("f1") + data["frameEnd"] = node.evalParm("f2") + data["steps"] = node.evalParm("f3") + + return data \ No newline at end of file diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 28830bdc64..ee73745651 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read +from .lib import imprint, read, get_frame_data class OpenPypeCreatorError(CreatorError): diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index cad894cc3f..cd94635c29 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -25,6 +25,8 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): ropnode = instance.data["members"][0] + frame_data = lib.get_frame_data(ropnode) + instance.data.update(frame_data) start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) From bd8b2c7d70a13a85f89ab4f60489a8114e9cdf01 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:26 +0200 Subject: [PATCH 040/202] :recycle: arnold creator --- .../plugins/create/create_arnold_ass.py | 45 ++++++++++--------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 72088e43b0..b3926b8cee 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -1,9 +1,12 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Arnold ASS files.""" from openpype.hosts.houdini.api import plugin -class CreateArnoldAss(plugin.Creator): +class CreateArnoldAss(plugin.HoudiniCreator): """Arnold .ass Archive""" + identifier = 
"io.openpype.creators.houdini.ass" label = "Arnold ASS" family = "ass" icon = "magic" @@ -12,42 +15,40 @@ class CreateArnoldAss(plugin.Creator): # Default extension: `.ass` or `.ass.gz` ext = ".ass" - def __init__(self, *args, **kwargs): - super(CreateArnoldAss, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "arnold"}) - self.data.update({"node_type": "arnold"}) + instance = super(CreateArnoldAss, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def process(self): - node = super(CreateArnoldAss, self).process() + instance_node = hou.node(instance.get("instance_node")) - basename = node.name() - node.setName(basename + "_ASS", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ASS", unique_name=True) # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export - parm_template_group = node.parmTemplateGroup() + parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) - node.setParmTemplateGroup(parm_template_group) + instance_node.setParmTemplateGroup(parm_template_group) - filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext) + filepath = "$HIP/pyblish/{}.$F4{}".format(subset_name, self.ext) parms = { # Render frame range "trange": 1, - # Arnold ROP settings "ar_ass_file": filepath, - "ar_ass_export_enable": 1 + "ar_ass_export_enable": 1, + "filename": filepath } - node.setParms(parms) - # Lock the ASS export attribute - node.parm("ar_ass_export_enable").lock(True) - - # Lock some Avalon attributes - to_lock = ["family", "id"] + # Lock any parameters in this list + to_lock = ["ar_ass_export_enable", "family", "id"] for name in to_lock: - parm = node.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 93b3b0403401075596e9951c06fc5414e7fa50a0 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 16 Sep 2022 00:34:42 +0200 Subject: [PATCH 041/202] :recycle: composite creator --- .../plugins/create/create_composite.py | 51 +++++++++---------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index e278708076..96d8ca9fd5 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,44 +1,43 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating composite sequences.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateCompositeSequence(plugin.Creator): +class CreateCompositeSequence(plugin.HoudiniCreator): """Composite ROP to Image Sequence""" + identifier = "io.openpype.creators.houdini.imagesequence" label = "Composite (Image Sequence)" family = "imagesequence" icon = "gears" - def __init__(self, *args, **kwargs): - super(CreateCompositeSequence, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou + from pprint import pformat - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "comp"}) - # Type of ROP node to create - 
self.data.update({"node_type": "comp"}) + instance = super(CreateCompositeSequence, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + self.log.info(pformat(instance)) + print(pformat(instance)) + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. + filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) + parms = { + "copoutput": filepath + } - """ - parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} - - if self.nodes: - node = self.nodes[0] - parms.update({"coppath": node.path()}) - - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] for name in to_lock: - try: - parm = instance.parm(name) - parm.lock(True) - except AttributeError: - # missing lock pattern - self.log.debug( - "missing lock pattern {}".format(name)) + parm = instance_node.parm(name) + parm.lock(True) + From ec4bcc474b7a3c3701ae45c8008536d0fc3d7992 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 12:25:48 +0200 Subject: [PATCH 042/202] :recycle: replace exceptions and asserts in validators --- .../plugins/publish/validate_bypass.py | 12 +++++---- .../publish/validate_cop_output_node.py | 19 ++++++++----- .../publish/validate_file_extension.py | 11 +++++--- .../validate_houdini_license_category.py | 10 ++++--- .../publish/validate_mkpaths_toggled.py | 13 ++++----- .../plugins/publish/validate_no_errors.py | 9 ++++--- .../publish/validate_remote_publish.py | 27 ++++++++++++------- .../validate_remote_publish_enabled.py | 11 +++++--- .../publish/validate_sop_output_node.py | 9 ++++--- .../validate_usd_layer_path_backslashes.py | 8 +++--- .../publish/validate_usd_model_and_shade.py | 6 +++-- .../publish/validate_usd_output_node.py | 9 ++++--- .../validate_usd_render_product_names.py | 7 +++-- .../plugins/publish/validate_usd_setdress.py | 7 +++-- .../validate_usd_shade_model_exists.py | 9 ++++--- .../publish/validate_usd_shade_workspace.py | 23 +++++++++------- 16 files changed, 121 insertions(+), 69 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 1b441b8da9..59ab2d2b1b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateBypassed(pyblish.api.InstancePlugin): @@ -11,7 +12,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder - 0.1 + order = pyblish.api.ValidatorOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" @@ -26,9 +27,10 @@ class ValidateBypassed(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: rop = invalid[0] - raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." 
- % rop.path() + raise PublishValidationError( + ("ROP node {} is set to bypass, publishing cannot " + "continue.".format(rop.path())), + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 86ddc2adf2..2e99e5fb41 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError + class ValidateCopOutputNode(pyblish.api.InstancePlugin): """Validate the instance COP Output Node. @@ -20,9 +23,10 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -54,7 +58,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Cop2", ( - "Output node %s is not of category Cop2. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Cop2": + raise PublishValidationError( + ("Output node %s is not of category Cop2. " + "This is a bug...").format(output_node.path()), + title=cls.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index f050a41b88..5211cdb919 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import os import pyblish.api from openpype.hosts.houdini.api import lib +from openpype.pipeline import PublishValidationError class ValidateFileExtension(pyblish.api.InstancePlugin): @@ -29,8 +31,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "ROP node has incorrect " "file extension: %s" % invalid + raise PublishValidationError( + "ROP node has incorrect file extension: {}".format(invalid), + title=self.label ) @classmethod @@ -53,7 +56,9 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): for family in families: extension = cls.family_extensions.get(family, None) if extension is None: - raise RuntimeError("Unsupported family: %s" % family) + raise PublishValidationError( + "Unsupported family: {}".format(family), + title=cls.label) if output_extension != extension: return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index f5f03aa844..f1c52f22c1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): @@ -24,7 +26,7 @@ class 
ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): license = hou.licenseCategory() if license != hou.licenseCategoryType.Commercial: - raise RuntimeError( - "USD Publishing requires a full Commercial " - "license. You are on: %s" % license - ) + raise PublishValidationError( + ("USD Publishing requires a full Commercial " + "license. You are on: {}").format(license), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index be6a798a95..9d1f92a101 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" @@ -14,10 +15,10 @@ class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid - ) + raise PublishValidationError( + ("Found ROP node with Create Intermediate " + "Directories turned off: {}".format(invalid)), + title=self.label) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 77e7cc9ff7..fd396ad8c9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError def cook_in_range(node, start, end): @@ -28,7 +29,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] label = "Validate no errors" @@ -62,4 +63,6 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): errors = get_errors(node) if errors: self.log.error(errors) - raise RuntimeError("Node has errors: %s" % node.path()) + raise PublishValidationError( + "Node has errors: {}".format(node.path()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 0ab182c584..7349022681 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,9 @@ +# -*-coding: utf-8 -*- import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError import hou @@ -27,17 +29,24 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): # We ensure it's a shell node and that it has the pre-render script # set correctly. 
Plus the shell script it will trigger should be # completely empty (doing nothing) - assert node.type().name() == "shell", "Must be shell ROP node" - assert node.parm("command").eval() == "", "Must have no command" - assert not node.parm("shellexec").eval(), "Must not execute in shell" - assert ( - node.parm("prerender").eval() == cmd - ), "REMOTE_PUBLISH node does not have correct prerender script." - assert ( - node.parm("lprerender").eval() == "python" - ), "REMOTE_PUBLISH node prerender script type not set to 'python'" + if node.type().name() != "shell": + self.raise_error("Must be shell ROP node") + if node.parm("command").eval() != "": + self.raise_error("Must have no command") + if node.parm("shellexec").eval(): + self.raise_error("Must not execute in shell") + if node.parm("prerender").eval() != cmd: + self.raise_error(("REMOTE_PUBLISH node does not have " + "correct prerender script.")) + if node.parm("lprerender").eval() != "python": + self.raise_error(("REMOTE_PUBLISH node prerender script " + "type not set to 'python'")) @classmethod def repair(cls, context): """(Re)create the node if it fails to pass validation.""" lib.create_remote_publish_node(force=True) + + def raise_error(self, message): + self.log.error(message) + raise PublishValidationError(message, title=self.label) \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index afc8df7528..8ec62f4e85 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -18,10 +20,12 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=self.label) if node.isBypassed(): - raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + raise PublishValidationError( + "REMOTE_PUBLISH must not be bypassed.", title=self.label) @classmethod def repair(cls, context): @@ -29,7 +33,8 @@ class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=cls.label) cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index c18ad7a1b7..a1a96120e2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -58,10 +58,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." 
- % output_node.path() - ) + if output_node.type().category().name() != "Sop": + raise PublishValidationError( + ("Output node {} is not of category Sop. " + "This is a bug.").format(output_node.path()), + title=cls.label) # Ensure the node is cooked and succeeds to cook so we can correctly # check for its geometry data. diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 95cad82085..3e593a9508 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): @@ -44,7 +46,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): invalid.append(layer) if invalid: - raise RuntimeError( + raise PublishValidationError(( "Loaded layers have backslashes. " - "This is invalid for HUSK USD rendering." - ) + "This is invalid for HUSK USD rendering."), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index bdb7c05319..3ca0fd0298 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -1,7 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib - +from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux @@ -55,7 +56,8 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): if invalid: prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) - raise RuntimeError("Found invalid primitives: %s" % prim_paths) + raise PublishValidationError( + "Found invalid primitives: {}".format(prim_paths)) class ValidateUsdShade(ValidateUsdModel): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 0c38ccd4be..9a4d292778 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateUSDOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +22,10 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. 
" + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py index 36336a03ae..02c44ab94e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- +import os import pyblish.api -import os +from openpype.pipeline import PublishValidationError class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): @@ -28,4 +30,5 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): if invalid: for message in invalid: self.log.error(message) - raise RuntimeError("USD Render Paths are invalid.") + raise PublishValidationError( + "USD Render Paths are invalid.", title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 835cd5977a..89ae8b8ad9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUsdSetDress(pyblish.api.InstancePlugin): @@ -47,8 +49,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): invalid.append(node) if invalid: - raise RuntimeError( + raise PublishValidationError(( "SetDress contains local geometry. " "This is not allowed, it must be an assembly " - "of referenced assets." + "of referenced assets."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index f08c7c72c5..c4f118ac3b 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import re import pyblish.api @@ -5,6 +6,7 @@ import pyblish.api from openpype.client import get_subset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -32,7 +34,8 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): project_name, model_subset, asset_doc["_id"], fields=["_id"] ) if not subset_doc: - raise RuntimeError( - "USD Model subset not found: " - "%s (%s)" % (model_subset, asset_name) + raise PublishValidationError( + ("USD Model subset not found: " + "{} ({})").format(model_subset, asset_name), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 2781756272..2ff2702061 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError import hou @@ -12,7 +13,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): """ - order = 
ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade Workspace" @@ -39,13 +40,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): if node_type != other_node_type: continue - # Get highest version + # Get the highest version highest = max(highest, other_version) if version != highest: - raise RuntimeError( - "Shading Workspace is not the latest version." - " Found %s. Latest is %s." % (version, highest) + raise PublishValidationError( + ("Shading Workspace is not the latest version." + " Found {}. Latest is {}.").format(version, highest), + title=self.label ) # There were some issues with the editable node not having the right @@ -56,8 +58,9 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): ) rop_value = rop.parm("lopoutput").rawValue() if rop_value != value: - raise RuntimeError( - "Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values." + raise PublishValidationError( + ("Shading Workspace has invalid 'lopoutput'" + " parameter value. The Shading Workspace" + " needs to be reset to its default values."), + title=self.label ) From 08ac24080f863e904b4ddec4b53a9c9f502f9685 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 15:02:04 +0200 Subject: [PATCH 043/202] :recycle: convert creators --- .../plugins/create/create_redshift_proxy.py | 40 +++++++------- .../plugins/create/create_redshift_rop.py | 54 +++++++++---------- .../houdini/plugins/create/create_usd.py | 38 ++++++------- .../plugins/create/create_usdrender.py | 37 ++++++------- 4 files changed, 85 insertions(+), 84 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index da4d80bf2b..d4bfe9d253 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -1,18 +1,20 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Redshift proxies.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftProxy(plugin.Creator): +class CreateRedshiftProxy(plugin.HoudiniCreator): """Redshift Proxy""" - + identifier = "io.openpype.creators.houdini.redshiftproxy" label = "Redshift Proxy" family = "redshiftproxy" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateRedshiftProxy, self).__init__(*args, **kwargs) - + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) # Redshift provides a `Redshift_Proxy_Output` node type which shows # a limited set of parameters by default and is set to extract a @@ -21,28 +23,26 @@ class CreateRedshiftProxy(plugin.Creator): # why this happens. # TODO: Somehow enforce so that it only shows the original limited # attributes of the Redshift_Proxy_Output node type - self.data.update({"node_type": "Redshift_Proxy_Output"}) + instance_data.update({"node_type": "Redshift_Proxy_Output"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateRedshiftProxy, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
+ instance_node = hou.node(instance.get("instance_node")) - """ parms = { - "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs', + "RS_archive_file": '$HIP/pyblish/`{}.$F4.rs'.format(subset_name), } - if self.nodes: - node = self.nodes[0] - path = node.path() - parms["RS_archive_sopPath"] = path + if self.selected_nodes: + parms["RS_archive_sopPath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] + to_lock = ["family", "id", "prim_to_detail_pattern"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 6949ca169b..2bb8325623 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -1,41 +1,40 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin to create Redshift ROP.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftROP(plugin.Creator): +class CreateRedshiftROP(plugin.HoudiniCreator): """Redshift ROP""" - + identifier = "io.openpype.creators.houdini.redshift_rop" label = "Redshift ROP" family = "redshift_rop" icon = "magic" defaults = ["master"] - def __init__(self, *args, **kwargs): - super(CreateRedshiftROP, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa + + instance_data.pop("active", None) + instance_data.update({"node_type": "Redshift_ROP"}) + # Add chunk size attribute + instance_data["chunkSize"] = 10 # Clear the family prefix from the subset - subset = self.data["subset"] + subset = subset_name subset_no_prefix = subset[len(self.family):] subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] - self.data["subset"] = subset_no_prefix + subset_name = subset_no_prefix - # Add chunk size attribute - self.data["chunkSize"] = 10 + instance = super(CreateRedshiftROP, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_node = hou.node(instance.get("instance_node")) - self.data.update({"node_type": "Redshift_ROP"}) - - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ - basename = instance.name() - instance.setName(basename + "_ROP", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ROP", unique_name=True) # Also create the linked Redshift IPR Rop try: @@ -43,11 +42,12 @@ class CreateRedshiftROP(plugin.Creator): "Redshift_IPR", node_name=basename + "_IPR" ) except hou.OperationFailed: - raise Exception(("Cannot create Redshift node. Is Redshift " - "installed and enabled?")) + raise plugin.OpenPypeCreatorError( + ("Cannot create Redshift node. 
Is Redshift " + "installed and enabled?")) # Move it to directly under the Redshift ROP - ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1)) # Set the linked rop to the Redshift ROP ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) @@ -61,10 +61,10 @@ class CreateRedshiftROP(plugin.Creator): "RS_outputMultilayerMode": 0, # no multi-layered exr "RS_outputBeautyAOVSuffix": "beauty", } - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 5bcb7840c0..8502a4e5e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -1,39 +1,39 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating USDs.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSD(plugin.Creator): +class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" - + identifier = "io.openpype.creators.houdini.usd" label = "USD (experimental)" family = "usd" icon = "gears" enabled = False - def __init__(self, *args, **kwargs): - super(CreateUSD, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usd"}) - self.data.update({"node_type": "usd"}) + instance = super(CreateUSD, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- - """ parms = { - "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name), "enableoutputprocessor_simplerelativepaths": False, } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = [ @@ -43,5 +43,5 @@ class CreateUSD(plugin.Creator): "id", ] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index cb3fe3f02b..e5c61d2984 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -1,42 +1,43 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin for creating USD renders.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSDRender(plugin.Creator): +class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" - + identifier = "io.openpype.creators.houdini.usdrender" label = "USD Render (experimental)" family = "usdrender" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateUSDRender, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - self.parent = hou.node("/stage") + instance_data["parent"] = hou.node("/stage") # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usdrender"}) - self.data.update({"node_type": "usdrender"}) + instance = super(CreateUSDRender, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- """ parms = { # Render frame range "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) - instance.setParms(parms) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] for name in to_lock: - parm = instance.parm(name) + parm = instance_node.parm(name) parm.lock(True) From 71caefe44915f9618e276812408d29ebd4ca5a51 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:06:28 +0200 Subject: [PATCH 044/202] :recycle: refactor parameter locking --- openpype/hosts/houdini/api/plugin.py | 15 +++++++++++++++ .../houdini/plugins/create/create_arnold_ass.py | 4 +--- .../houdini/plugins/create/create_composite.py | 11 ++--------- .../houdini/plugins/create/create_pointcache.py | 4 +--- .../plugins/create/create_redshift_proxy.py | 4 +--- .../houdini/plugins/create/create_redshift_rop.py | 4 +--- .../hosts/houdini/plugins/create/create_usd.py | 4 +--- .../houdini/plugins/create/create_usdrender.py | 4 +--- 8 files changed, 23 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ee73745651..5c52cb416b 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -150,6 +150,21 @@ class HoudiniCreator(NewCreator): OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + def lock_parameters(self, node, parameters): + """Lock list of specified parameters on the node. + + Args: + node (hou.Node): Houdini node to lock parameters on. + parameters (list of str): List of parameter names. + + """ + for name in parameters: + try: + parm = node.parm(name) + parm.lock(True) + except AttributeError: + self.log.debug("missing lock pattern {}".format(name)) + def collect_instances(self): for instance in list_instances(creator_id=self.identifier): created_instance = CreatedInstance.from_existing( diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index b3926b8cee..a48658ab99 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -49,6 +49,4 @@ class CreateArnoldAss(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["ar_ass_export_enable", "family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 96d8ca9fd5..1a9c56571a 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -13,8 +13,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): icon = "gears" def create(self, subset_name, instance_data, pre_create_data): - import hou - from pprint import pformat + import hou # noqa instance_data.pop("active", None) instance_data.update({"node_type": "comp"}) @@ -24,10 +23,7 @@ class CreateCompositeSequence(plugin.HoudiniCreator): instance_data, pre_create_data) # type: CreatedInstance - self.log.info(pformat(instance)) - print(pformat(instance)) instance_node = hou.node(instance.get("instance_node")) - filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) parms = { "copoutput": filepath @@ -37,7 +33,4 @@ class 
CreateCompositeSequence(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) - + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 239f3ce50b..124936d285 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -41,6 +41,4 @@ class CreatePointCache(plugin.HoudiniCreator): # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index d4bfe9d253..8b6a68437b 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -43,6 +43,4 @@ class CreateRedshiftProxy(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id", "prim_to_detail_pattern"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 2bb8325623..2cbe9bfda1 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -65,6 +65,4 @@ class CreateRedshiftROP(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 8502a4e5e9..51ed8237c5 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -42,6 +42,4 @@ class CreateUSD(plugin.HoudiniCreator): "family", "id", ] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index e5c61d2984..f78f0bed50 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -38,6 +38,4 @@ class CreateUSDRender(plugin.HoudiniCreator): # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance_node.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) From df2f68db9798bddffb8ee8fcfcf08764dffc44e9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:06:56 +0200 Subject: [PATCH 045/202] :recycle: move splitext to lib --- openpype/hosts/houdini/api/lib.py | 23 ++++++++++++++++++- .../houdini/plugins/publish/collect_frames.py | 21 +++++++---------- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index d0a3068531..8d6f666eb7 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 
-*- import sys +import os import uuid import logging from contextlib import contextmanager @@ -556,4 +557,24 @@ def get_frame_data(node): data["frameEnd"] = node.evalParm("f2") data["steps"] = node.evalParm("f3") - return data \ No newline at end of file + return data + + +def splitext(name, allowed_multidot_extensions): + # type: (str, list) -> tuple + """Split file name to name and extension. + + Args: + name (str): File name to split. + allowed_multidot_extensions (list of str): List of allowed multidot + extensions. + + Returns: + tuple: Name and extension. + """ + + for ext in allowed_multidot_extensions: + if name.endswith(ext): + return name[:-len(ext)], ext + + return os.path.splitext(name) diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index cd94635c29..9108432384 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -1,19 +1,13 @@ +# -*- coding: utf-8 -*- +"""Collector plugin for frames data on ROP instances.""" import os import re -import hou +import hou # noqa import pyblish.api from openpype.hosts.houdini.api import lib -def splitext(name, allowed_multidot_extensions): - - for ext in allowed_multidot_extensions: - if name.endswith(ext): - return name[:-len(ext)], ext - - return os.path.splitext(name) - class CollectFrames(pyblish.api.InstancePlugin): """Collect all frames which would be saved from the ROP nodes""" @@ -40,13 +34,13 @@ class CollectFrames(pyblish.api.InstancePlugin): self.log.warning("Using current frame: {}".format(hou.frame())) output = output_parm.eval() - _, ext = splitext(output, + _, ext = lib.splitext(output, allowed_multidot_extensions=[".ass.gz"]) file_name = os.path.basename(output) result = file_name # Get the filename pattern match from the output - # path so we can compute all frames that would + # path, so we can compute all frames that would # come out from rendering the ROP node if there # is a frame pattern in the name pattern = r"\w+\.(\d+)" + re.escape(ext) @@ -65,8 +59,9 @@ class CollectFrames(pyblish.api.InstancePlugin): # for a custom frame list. So this should be refactored. 
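
A minimal, self-contained sketch of how the `splitext` helper added to lib.py above is meant to behave with multi-dot extensions such as ".ass.gz" (the example file names are arbitrary; the real implementation lives in openpype/hosts/houdini/api/lib.py):

    import os

    def splitext(name, allowed_multidot_extensions):
        # Honour known multi-dot extensions first, then fall back to the
        # regular os.path.splitext() behaviour for everything else.
        for ext in allowed_multidot_extensions:
            if name.endswith(ext):
                return name[:-len(ext)], ext
        return os.path.splitext(name)

    print(splitext("subsetMain.0001.ass.gz", [".ass.gz"]))  # ('subsetMain.0001', '.ass.gz')
    print(splitext("subsetMain.0001.exr", [".ass.gz"]))     # ('subsetMain.0001', '.exr')
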
instance.data.update({"frames": result}) - def create_file_list(self, match, start_frame, end_frame): - """Collect files based on frame range and regex.match + @staticmethod + def create_file_list(match, start_frame, end_frame): + """Collect files based on frame range and `regex.match` Args: match(re.match): match object From d59861a6539dd69e51180245ab6ce2164343aaab Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 20 Sep 2022 19:07:21 +0200 Subject: [PATCH 046/202] :bug: update representation creation --- .../plugins/publish/extract_composite.py | 26 +++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index eb77a91d62..4c91d51efd 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -3,7 +3,7 @@ import os import pyblish.api import openpype.api -from openpype.hosts.houdini.api.lib import render_rop +from openpype.hosts.houdini.api.lib import render_rop, splitext class ExtractComposite(openpype.api.Extractor): @@ -28,8 +28,24 @@ class ExtractComposite(openpype.api.Extractor): render_rop(ropnode) - if "files" not in instance.data: - instance.data["files"] = [] + output = instance.data["frames"] + _, ext = splitext(output[0], []) + ext = ext.lstrip(".") - frames = instance.data["frames"] - instance.data["files"].append(frames) + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": ext, + "files": output, + "stagingDir": staging_dir, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + } + + from pprint import pformat + + self.log.info(pformat(representation)) + + instance.data["representations"].append(representation) \ No newline at end of file From 3a935c968c97bd19695ae3888c9904a961397d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 21 Sep 2022 18:36:23 +0200 Subject: [PATCH 047/202] :rotating_light: cosmetic changes --- openpype/hosts/houdini/api/lib.py | 3 +++ openpype/hosts/houdini/api/pipeline.py | 7 ++++--- openpype/hosts/houdini/api/plugin.py | 5 +++-- .../houdini/plugins/create/create_alembic_camera.py | 6 ++++-- .../hosts/houdini/plugins/create/create_arnold_ass.py | 4 +++- openpype/hosts/houdini/plugins/create/create_hda.py | 9 ++++----- .../hosts/houdini/plugins/publish/extract_composite.py | 2 +- .../houdini/plugins/publish/increment_current_file.py | 6 +++--- .../hosts/houdini/plugins/publish/validate_camera_rop.py | 2 +- .../houdini/plugins/publish/validate_remote_publish.py | 2 +- 10 files changed, 27 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 8d6f666eb7..3426040d65 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -20,6 +20,7 @@ self._parent = None log = logging.getLogger(__name__) JSON_PREFIX = "JSON:::" + def get_asset_fps(): """Return current asset fps.""" return get_current_project_asset()["data"].get("fps") @@ -418,6 +419,8 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects data = {} + if not node: + return data for parameter in node.spareParms(): value = parameter.eval() # test if value is json encoded dict diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 4ff24c8004..d64479fc14 100644 --- 
a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -91,10 +91,11 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): def save_workfile(self, dst_path=None): # Force forwards slashes to avoid segfault - filepath = dst_path.replace("\\", "/") - hou.hipFile.save(file_name=filepath, + if dst_path: + dst_path = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=dst_path, save_to_recent_files=True) - return filepath + return dst_path def open_workfile(self, filepath): # Force forwards slashes to avoid segfault diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 5c52cb416b..897696533f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -14,7 +14,7 @@ from openpype.pipeline import ( ) from openpype.lib import BoolDef from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read, get_frame_data +from .lib import imprint, read class OpenPypeCreatorError(CreatorError): @@ -96,8 +96,9 @@ class Creator(LegacyCreator): class HoudiniCreator(NewCreator): selected_nodes = [] + @staticmethod def _create_instance_node( - self, node_name, parent, + node_name, parent, node_type="geometry"): # type: (str, str, str) -> hou.Node """Create node representing instance. diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 483c4205a8..183ab28b26 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -5,7 +5,7 @@ from openpype.pipeline import CreatedInstance class CreateAlembicCamera(plugin.HoudiniCreator): - """Single baked camera from Alembic ROP""" + """Single baked camera from Alembic ROP.""" identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" @@ -40,5 +40,7 @@ class CreateAlembicCamera(plugin.HoudiniCreator): # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
- instance_node.parm("use_sop_path").lock(True) + to_lock = ["use_sop_path"] + self.lock_parameters(instance_node, to_lock) + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index a48658ab99..40b253d1aa 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -24,7 +24,7 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance = super(CreateArnoldAss, self).create( subset_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) # type: plugin.CreatedInstance instance_node = hou.node(instance.get("instance_node")) @@ -47,6 +47,8 @@ class CreateArnoldAss(plugin.HoudiniCreator): "filename": filepath } + instance_node.setParms(parms) + # Lock any parameters in this list to_lock = ["ar_ass_export_enable", "family", "id"] self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 67e338b1b3..67c05b1634 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -5,7 +5,7 @@ from openpype.client import ( get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import (lib, plugin) +from openpype.hosts.houdini.api import plugin class CreateHDA(plugin.HoudiniCreator): @@ -36,6 +36,8 @@ class CreateHDA(plugin.HoudiniCreator): def _create_instance_node( self, node_name, parent, node_type="geometry"): + import hou + parent_node = hou.node("/obj") if self.selected_nodes: # if we have `use selection` enabled, and we have some @@ -70,15 +72,12 @@ class CreateHDA(plugin.HoudiniCreator): hda_node.setName(node_name) return hda_node - def create(self, subset_name, instance_data, pre_create_data): - import hou - instance_data.pop("active", None) instance = super(CreateHDA, self).create( subset_name, instance_data, - pre_create_data) # type: CreatedInstance + pre_create_data) # type: plugin.CreatedInstance return instance diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 4c91d51efd..8dbfd3e08c 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -48,4 +48,4 @@ class ExtractComposite(openpype.api.Extractor): self.log.info(pformat(representation)) - instance.data["representations"].append(representation) \ No newline at end of file + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index 92ac9fbeca..16d9ef9aec 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,7 +2,7 @@ import pyblish.api from openpype.lib import version_up from openpype.pipeline import registered_host - +from openpype.hosts.houdini.api import HoudiniHost class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. 
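
The hunk below swaps the save call over to the host's save_workfile(). A rough sketch of the flow the plugin ends up with, assuming the OpenPype imports shown in the hunk above are available (an illustration only, not the plugin code itself):

    from openpype.lib import version_up
    from openpype.pipeline import registered_host

    def increment_and_save(context):
        # Refuse to continue if the scene was renamed after collection,
        # then bump the workfile version and save under the new name.
        host = registered_host()
        current_file = host.current_file()
        if context.data["currentFile"] != current_file:
            raise RuntimeError("Scene name changed since collection.")
        new_filepath = version_up(current_file)
        host.save_workfile(new_filepath)
        return new_filepath
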
@@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): # Filename must not have changed since collecting - host = registered_host() + host = registered_host() # type: HoudiniHost current_file = host.current_file() assert ( context.data["currentFile"] == current_file ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save_file(new_filepath) + host.save_workfile(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index 18fed7fbc4..41b5273e6a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -56,5 +56,5 @@ class ValidateCameraROP(pyblish.api.InstancePlugin): if camera.type().name() != "cam": raise PublishValidationError( ("Object set in Alembic ROP is not a camera: " - "{} (type: {})").format(camera, camera.type().name()), + "{} (type: {})").format(camera, camera.type().name()), title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 7349022681..4e8e5fc0e8 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -49,4 +49,4 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin): def raise_error(self, message): self.log.error(message) - raise PublishValidationError(message, title=self.label) \ No newline at end of file + raise PublishValidationError(message, title=self.label) From 21e98faef021b83fbd961a63d6398795b9db119d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 21 Oct 2022 11:07:04 +0200 Subject: [PATCH 048/202] :sparkles: cache collected instances --- openpype/hosts/houdini/api/pipeline.py | 15 +++++++-------- openpype/hosts/houdini/api/plugin.py | 9 +++++++-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index d64479fc14..f15cd6f2d5 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -435,10 +435,13 @@ def list_instances(creator_id=None): """ instance_signature = { - "id": "pyblish.avalon.instance", - "identifier": creator_id + "id": "pyblish.avalon.instance" } - return lib.lsattrs(instance_signature) + + return [ + i for i in lib.lsattrs(instance_signature) + if i.paramEval("creator_identifier") == creator_id + ] def remove_instance(instance): @@ -448,12 +451,8 @@ def remove_instance(instance): because it might contain valuable data for artist. 
""" - nodes = instance.get("members") - if not nodes: - return - # Assume instance node is first node - instance_node = hou.node(nodes[0]) + instance_node = hou.node(instance.data.get("instance_node")) to_delete = None for parameter in instance_node.spareParms(): if parameter.name() == "id" and \ diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 897696533f..fa56b2cb8d 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -133,7 +133,7 @@ class HoudiniCreator(NewCreator): # wondering if we'll ever need more than one member here # in Houdini - instance_data["members"] = [instance_node.path()] + # instance_data["members"] = [instance_node.path()] instance_data["instance_node"] = instance_node.path() instance = CreatedInstance( @@ -167,7 +167,12 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - for instance in list_instances(creator_id=self.identifier): + instances = [i for i in self.collection_shared_data.get( + "houdini_cached_instances", []) if i.paramEval("creator_identifier") == self.identifier] + if not instances: + print("not using cached instances") + instances = list_instances(creator_id=self.identifier) + for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self ) From 19d237323d628bd4e656bf379be30ef3f1df6be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Fri, 21 Oct 2022 11:07:23 +0200 Subject: [PATCH 049/202] :bug: fix multiple selection --- .../hosts/houdini/plugins/create/create_alembic_camera.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 183ab28b26..481c6bea77 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin -from openpype.pipeline import CreatedInstance +from openpype.pipeline import CreatedInstance, CreatorError class CreateAlembicCamera(plugin.HoudiniCreator): @@ -30,7 +30,9 @@ class CreateAlembicCamera(plugin.HoudiniCreator): } if self.selected_nodes: - path = self.selected_nodes.path() + if len(self.selected_nodes) > 1: + raise CreatorError("More than one item selected.") + path = self.selected_nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) From 694bc49305d015ee0e773895541e3850695dce2f Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 14:16:16 +0200 Subject: [PATCH 050/202] :bug: fix caching --- openpype/hosts/houdini/api/plugin.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index fa56b2cb8d..679f7b0d0f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -167,11 +167,13 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - instances = [i for i in self.collection_shared_data.get( - "houdini_cached_instances", []) if i.paramEval("creator_identifier") == self.identifier] + cached_instances = 
self.collection_shared_data.get( + "houdini_cached_instances") + instances = cached_instances.get(self.identifier) if not instances: print("not using cached instances") instances = list_instances(creator_id=self.identifier) + self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self From 6ee68861a8bfa06f346c6f899bc26b5f8d29e670 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 14:40:33 +0200 Subject: [PATCH 051/202] :bug: fix missing keys --- openpype/hosts/houdini/api/plugin.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 679f7b0d0f..2a16b08908 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -168,11 +168,14 @@ class HoudiniCreator(NewCreator): def collect_instances(self): cached_instances = self.collection_shared_data.get( - "houdini_cached_instances") + "houdini_cached_instances", {}) instances = cached_instances.get(self.identifier) if not instances: - print("not using cached instances") instances = list_instances(creator_id=self.identifier) + if not self.collection_shared_data.get( + "houdini_cached_instances"): + self.collection_shared_data["houdini_cached_instances"] = {} + self.log.info("Caching instances for {}".format(self.identifier)) self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 for instance in instances: created_instance = CreatedInstance.from_existing( From 696dc78be74dc8d48da411335c5e906db4c669ef Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:26:03 +0200 Subject: [PATCH 052/202] =?UTF-8?q?=F0=9F=A5=85=20catch=20edge=20case=20da?= =?UTF-8?q?ta=20flow?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 3426040d65..ceb3b753e0 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -310,6 +310,9 @@ def imprint(node, data, update=False): """ if not data: return + if not node: + self.log.error("Node is not set, calling imprint on invalid data.") + return current_parms = {p.name(): p for p in node.spareParms()} update_parms = [] From 4fe053b109d892a5b5f3770be693ae72d1c19967 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:32:27 +0200 Subject: [PATCH 053/202] :recycle: refactor the use of `members` --- .../plugins/publish/collect_active_state.py | 3 ++- .../houdini/plugins/publish/collect_frames.py | 2 +- .../plugins/publish/collect_instances.py | 2 +- .../publish/collect_members_as_nodes.py | 21 ------------------- .../plugins/publish/collect_output_node.py | 2 +- .../plugins/publish/collect_redshift_rop.py | 2 +- .../publish/collect_render_products.py | 2 +- .../plugins/publish/collect_usd_layers.py | 6 ++++-- .../plugins/publish/extract_alembic.py | 4 +++- .../houdini/plugins/publish/extract_ass.py | 4 +++- .../plugins/publish/extract_composite.py | 4 +++- .../plugins/publish/extract_redshift_proxy.py | 4 +++- .../houdini/plugins/publish/extract_usd.py | 3 ++- .../plugins/publish/extract_usd_layered.py | 2 +- .../plugins/publish/extract_vdb_cache.py | 4 +++- .../validate_abc_primitive_to_detail.py | 17 +++++++-------- 
.../publish/validate_alembic_face_sets.py | 4 ++-- .../publish/validate_alembic_input_node.py | 3 ++- .../publish/validate_animation_settings.py | 3 ++- .../plugins/publish/validate_bypass.py | 3 ++- .../publish/validate_cop_output_node.py | 15 +++++++++++-- .../publish/validate_file_extension.py | 4 +++- .../plugins/publish/validate_frame_token.py | 3 ++- .../plugins/publish/validate_no_errors.py | 2 +- .../validate_primitive_hierarchy_paths.py | 14 ++++++------- .../publish/validate_sop_output_node.py | 2 +- .../validate_usd_layer_path_backslashes.py | 2 +- .../publish/validate_usd_model_and_shade.py | 4 +++- .../publish/validate_usd_output_node.py | 2 +- .../plugins/publish/validate_usd_setdress.py | 3 ++- .../publish/validate_usd_shade_workspace.py | 2 +- .../publish/validate_vdb_output_node.py | 2 +- 32 files changed, 81 insertions(+), 69 deletions(-) delete mode 100644 openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index dd83721358..cc3f2e7fae 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -1,4 +1,5 @@ import pyblish.api +import hou class CollectInstanceActiveState(pyblish.api.InstancePlugin): @@ -24,7 +25,7 @@ class CollectInstanceActiveState(pyblish.api.InstancePlugin): # Check bypass state and reverse active = True - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9108432384..531cdf1249 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -18,7 +18,7 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) frame_data = lib.get_frame_data(ropnode) instance.data.update(frame_data) diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index 0582ee154c..bb85630552 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -84,7 +84,7 @@ class CollectInstances(pyblish.api.ContextPlugin): instance.data["families"] = [instance.data["family"]] instance[:] = [node] - instance.data["members"] = [node] + instance.data["instance_node"] = node.path() instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py b/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py deleted file mode 100644 index 07d71c6605..0000000000 --- a/openpype/hosts/houdini/plugins/publish/collect_members_as_nodes.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -import pyblish.api -import hou - - -class CollectMembersAsNodes(pyblish.api.InstancePlugin): - """Collects instance members as Houdini nodes.""" - - order = pyblish.api.CollectorOrder - 0.01 - hosts = ["houdini"] - label = "Collect Members as Nodes" - - def process(self, instance): - if not instance.data.get("creator_identifier"): - return - - nodes = [ - hou.node(member) for member in instance.data.get("members", []) - ] 
- - instance.data["members"] = nodes diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index a3989dc776..601ed17b39 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin): import hou - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 33bf74610a..346bdf3421 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index e88c5ea0e6..fcd80e0082 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ class CollectRenderProducts(pyblish.api.InstancePlugin): node = instance.data.get("output_node") if not node: - rop_path = instance.data["members"][0].path() + rop_path = instance.data["instance_node"].path() raise RuntimeError( "No output node found. Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index c21b336403..833add854b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -3,6 +3,8 @@ import os import pyblish.api import openpype.hosts.houdini.api.usd as usdlib +import hou + class CollectUsdLayers(pyblish.api.InstancePlugin): """Collect the USD Layers that have configured save paths.""" @@ -19,7 +21,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): self.log.debug("No output node found..") return - rop_node = instance.data["members"][0] + rop_node = hou.node(instance.get("instance_node")) save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -55,7 +57,7 @@ class CollectUsdLayers(pyblish.api.InstancePlugin): layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] # include same USD ROP - layer_inst.append(instance.data["members"][0]) + layer_inst.append(rop_node) # include layer data layer_inst.append((layer, save_path)) diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 0ad7a5069f..cb2d4ef424 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAlembic(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = 
hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index 864b8d5252..c6417ce18a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAss(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 1042dda8f0..7a1ab36b93 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -4,6 +4,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop, splitext +import hou + class ExtractComposite(publish.Extractor): @@ -14,7 +16,7 @@ class ExtractComposite(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index 4d32b6f97e..29ede98a52 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractRedshiftProxy(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 4f471af597..cbeb5add71 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -5,6 +5,7 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou class ExtractUSD(publish.Extractor): @@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index 7ce51c441b..0288b7363a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ class 
ExtractUSDLayered(publish.Extractor): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 8a6d3b578a..434d6a2160 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractVDBCache(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor): def process(self, instance): - ropnode = instance.data["members"][0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index 55c705c65b..86e92a052f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -32,19 +32,18 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - + import hou # noqa output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) if output_node is None: - node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." % node.path() + "Ensure a valid SOP output path is set." % rop_node.path() ) - return [node.path()] + return [rop_node.path()] - rop = instance.data["members"][0] - pattern = rop.parm("prim_to_detail_pattern").eval().strip() + pattern = rop_node.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( "Alembic ROP has no 'Primitive to Detail' pattern. " @@ -52,7 +51,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -60,14 +59,14 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] # Let's assume each attribute is explicitly named for now and has no # wildcards for Primitive to Detail. This simplifies the check. 
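
Every plugin touched in this commit repeats the same substitution: instead of taking the first element of instance.data["members"], the ROP is resolved from the stored node path. A sketch of that shared idiom (the helper name get_instance_node is not part of the patch, it is only used here for illustration):

    import hou

    def get_instance_node(instance):
        """Resolve the ROP node a publish instance points at.

        Mirrors the pattern used across the collectors, validators and
        extractors in this commit: the creator stores the node path under
        "instance_node" and the plugins turn it back into a hou.Node.
        """
        path = instance.data.get("instance_node")
        if not path:
            raise RuntimeError("Instance has no 'instance_node' data.")
        node = hou.node(path)
        if node is None:
            raise RuntimeError("Node no longer exists: {}".format(path))
        return node
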
diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index 10681e4b72..44d58cfa36 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api - +import hou class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. @@ -24,7 +24,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.data["instance_node"]) facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 4355bc7921..bafb206bd3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline import PublishValidationError +import hou class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -33,7 +34,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): output_node = instance.data.get("output_node") if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 32c5078b9f..f11f9c0c62 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateAnimationSettings(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 59ab2d2b1b..1bf51a986c 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.pipeline import PublishValidationError +import hou class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. 
@@ -36,6 +37,6 @@ class ValidateBypassed(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 2e99e5fb41..600dad8161 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- +import sys import pyblish.api +import six from openpype.pipeline import PublishValidationError @@ -34,10 +36,19 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): import hou - output_node = instance.data["output_node"] + try: + output_node = instance.data["output_node"] + except KeyError as e: + six.reraise( + PublishValidationError, + PublishValidationError( + "Can't determine COP output node.", + title=cls.__name__), + sys.exc_info()[2] + ) if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index 5211cdb919..4584e78f4f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -5,6 +5,8 @@ import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline import PublishValidationError +import hou + class ValidateFileExtension(pyblish.api.InstancePlugin): """Validate the output file extension fits the output family. 
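
The validate_cop_output_node.py change above wraps the missing-key lookup with six.reraise so the original traceback is kept while a PublishValidationError is surfaced instead. A minimal standalone sketch of that idiom, using plain built-in exception types so it runs outside of Pyblish:

    import sys
    import six

    def read_required(data, key):
        # Re-raise the KeyError as a friendlier error type while keeping
        # the original traceback, as the COP output validator does.
        try:
            return data[key]
        except KeyError:
            six.reraise(
                ValueError,
                ValueError("Can't determine '{}'.".format(key)),
                sys.exc_info()[2]
            )

    print(read_required({"output_node": "/out/comp1"}, "output_node"))
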
@@ -40,7 +42,7 @@ class ValidateFileExtension(pyblish.api.InstancePlugin): def get_invalid(cls, instance): # Get ROP node from instance - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) # Create lookup for current family in instance families = [] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index b65e9ef62e..b5f6ba71e1 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateFrameToken(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ class ValidateFrameToken(pyblish.api.InstancePlugin): @classmethod def get_invalid(cls, instance): - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index fd396ad8c9..f7c95aaf4e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -38,7 +38,7 @@ class ValidateNoErrors(pyblish.api.InstancePlugin): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance.data["members"][0]) + validate_nodes.append(hou.node(instance.get("instance_node"))) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index e1f1dc116e..d3a4c0cfbf 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder from openpype.pipeline import PublishValidationError +import hou class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -30,18 +31,17 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def get_invalid(cls, instance): output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) if output_node is None: - node = instance.data["members"][0] cls.log.error( "SOP Output node in '%s' does not exist. " - "Ensure a valid SOP output path is set." % node.path() + "Ensure a valid SOP output path is set." % rop_node.path() ) - return [node.path()] + return [rop_node.path()] - rop = instance.data["members"][0] - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -49,14 +49,14 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." 
) - return [rop.path()] + return [rop_node.path()] cls.log.debug("Checking for attribute: %s" % path_attr) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a1a96120e2..ed7f438729 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -37,7 +37,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin): output_node = instance.data.get("output_node") if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." % node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 3e593a9508..972ac59f49 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -26,7 +26,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 3ca0fd0298..a55eb70cb2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -6,6 +6,8 @@ from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux +import hou + def fullname(o): """Get fully qualified class name""" @@ -38,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 9a4d292778..af21efcafc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -36,7 +36,7 @@ class ValidateUSDOutputNode(pyblish.api.InstancePlugin): output_node = instance.data["output_node"] if output_node is None: - node = instance.data["members"][0] + node = hou.node(instance.get("instance_node")) cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index 89ae8b8ad9..01ebc0e828 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -22,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): def process(self, instance): from pxr import UsdGeom + import hou - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index 2ff2702061..bd3366a424 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -20,7 +20,7 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance.data["members"][0] + rop = hou.node(instance.get("instance_node")) workspace = rop.parent() definition = workspace.type().definition() diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index a9f8b38e7e..61c1209fc9 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -38,7 +38,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." % instance.data["members"][0].path() + "ROP node '%s'." 
% instance.get("instance_node") ) return [instance] From d6826524949c471472d0b655931b78f44bdb55e2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:33:16 +0200 Subject: [PATCH 054/202] :recycle: absolute paths by default --- .../houdini/plugins/create/create_alembic_camera.py | 3 ++- .../hosts/houdini/plugins/create/create_arnold_ass.py | 11 +++++------ .../hosts/houdini/plugins/create/create_composite.py | 8 +++++++- .../hosts/houdini/plugins/create/create_pointcache.py | 9 ++++++++- 4 files changed, 22 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index 481c6bea77..fec64eb4a1 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -25,7 +25,8 @@ class CreateAlembicCamera(plugin.HoudiniCreator): instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/{}.abc".format(subset_name), + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)), "use_sop_path": False, } diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 40b253d1aa..8b310753d0 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -28,23 +28,22 @@ class CreateArnoldAss(plugin.HoudiniCreator): instance_node = hou.node(instance.get("instance_node")) - basename = instance_node.name() - instance_node.setName(basename + "_ASS", unique_name=True) - # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) instance_node.setParmTemplateGroup(parm_template_group) - filepath = "$HIP/pyblish/{}.$F4{}".format(subset_name, self.ext) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { # Render frame range "trange": 1, # Arnold ROP settings "ar_ass_file": filepath, - "ar_ass_export_enable": 1, - "filename": filepath + "ar_ass_export_enable": 1 } instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index 1a9c56571a..45af2b0630 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -12,6 +12,8 @@ class CreateCompositeSequence(plugin.HoudiniCreator): family = "imagesequence" icon = "gears" + ext = ".exr" + def create(self, subset_name, instance_data, pre_create_data): import hou # noqa @@ -24,8 +26,12 @@ class CreateCompositeSequence(plugin.HoudiniCreator): pre_create_data) # type: CreatedInstance instance_node = hou.node(instance.get("instance_node")) - filepath = "$HIP/pyblish/{}.$F4.exr".format(subset_name) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { + "trange": 1, "copoutput": filepath } diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index 124936d285..6b6b277422 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -30,12 
+30,19 @@ class CreatePointCache(plugin.HoudiniCreator): "prim_to_detail_pattern": "cbId", "format": 2, "facesets": 0, - "filename": "$HIP/pyblish/{}.abc".format(subset_name) + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)) } if self.selected_nodes: parms["sop_path"] = self.selected_nodes[0].path() + # try to find output node + for child in self.selected_nodes[0].children(): + if child.type().name() == "output": + parms["sop_path"] = child.path() + break + instance_node.setParms(parms) instance_node.parm("trange").set(1) From 822f8f4bbc60c419e5f46fc7b4e7f205291951d9 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:33:42 +0200 Subject: [PATCH 055/202] :art: check for missing files --- openpype/hosts/houdini/plugins/publish/extract_ass.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index c6417ce18a..0d246625ba 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -35,8 +35,12 @@ class ExtractAss(publish.Extractor): # error and thus still continues to the integrator. To capture that # we make sure all files exist files = instance.data["frames"] - missing = [fname for fname in files - if not os.path.exists(os.path.join(staging_dir, fname))] + missing = [] + for file_name in files: + full_path = os.path.normpath(os.path.join(staging_dir, file_name)) + if not os.path.exists(full_path): + missing.append(full_path) + if missing: raise RuntimeError("Failed to complete Arnold ass extraction. " "Missing output files: {}".format(missing)) From 0e0920336b9d821857d0128101df82759f3f7ae3 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:34:06 +0200 Subject: [PATCH 056/202] =?UTF-8?q?=F0=9F=A9=B9=20parameter=20access?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/pipeline.py | 2 +- openpype/hosts/houdini/api/plugin.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index f15cd6f2d5..689d4d711c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -440,7 +440,7 @@ def list_instances(creator_id=None): return [ i for i in lib.lsattrs(instance_signature) - if i.paramEval("creator_identifier") == creator_id + if i.parm("creator_identifier").eval() == creator_id ] diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2a16b08908..560aeec6ea 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -131,11 +131,7 @@ class HoudiniCreator(NewCreator): instance_node = self._create_instance_node( subset_name, "/out", node_type) - # wondering if we'll ever need more than one member here - # in Houdini - # instance_data["members"] = [instance_node.path()] instance_data["instance_node"] = instance_node.path() - instance = CreatedInstance( self.family, subset_name, From f4b92f4d1daa67243369440aa6a4339c6c646f1b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:51:30 +0200 Subject: [PATCH 057/202] :art: improve imprinting --- openpype/hosts/houdini/api/lib.py | 10 ++++++---- openpype/hosts/houdini/api/plugin.py | 9 +++++---- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git 
a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index ceb3b753e0..2452ceef62 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -324,14 +324,16 @@ def imprint(node, data, update=False): parm = get_template_from_value(key, value) - if key in current_parms.keys(): + if key in current_parms: + if node.evalParm(key) == data[key]: + continue if not update: - log.debug("{} already exists on {}".format(key, node)) + log.debug(f"{key} already exists on {node}") else: - log.debug("replacing {}".format(key)) + log.debug(f"replacing {key}") update_parms.append(parm) continue - # parm.hide(True) + templates.append(parm) parm_group = node.parmTemplateGroup() diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 560aeec6ea..51476fef52 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -184,12 +184,13 @@ class HoudiniCreator(NewCreator): instance_node = hou.node(created_inst.get("instance_node")) current_data = read(instance_node) + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } imprint( instance_node, - { - key: value[1] for key, value in _changes.items() - if current_data.get(key) != value[1] - }, + new_values, update=True ) From 021800d1dd72fe65039c2bf427e67b76fdc239f6 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:52:00 +0200 Subject: [PATCH 058/202] :coffin: remove unused code --- .../hosts/houdini/hooks/set_operators_path.py | 25 ------------------ openpype/hosts/houdini/otls/OpenPype.hda | Bin 8238 -> 0 bytes 2 files changed, 25 deletions(-) delete mode 100644 openpype/hosts/houdini/hooks/set_operators_path.py delete mode 100644 openpype/hosts/houdini/otls/OpenPype.hda diff --git a/openpype/hosts/houdini/hooks/set_operators_path.py b/openpype/hosts/houdini/hooks/set_operators_path.py deleted file mode 100644 index 6f26baaa78..0000000000 --- a/openpype/hosts/houdini/hooks/set_operators_path.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -from openpype.lib import PreLaunchHook -import os - - -class SetOperatorsPath(PreLaunchHook): - """Set path to OpenPype assets folder.""" - - app_groups = ["houdini"] - - def execute(self): - hou_path = self.launch_context.env.get("HOUDINIPATH") - - openpype_assets = os.path.join( - os.getenv("OPENPYPE_REPOS_ROOT"), - "openpype", "hosts", "houdini", "hda" - ) - - if not hou_path: - self.launch_context.env["HOUDINIPATH"] = openpype_assets - return - - self.launch_context.env["HOUDINIPATH"] = "{}{}{}".format( - hou_path, os.pathsep, openpype_assets - ) diff --git a/openpype/hosts/houdini/otls/OpenPype.hda b/openpype/hosts/houdini/otls/OpenPype.hda deleted file mode 100644 index b34418d422b69282353dc134b1c4855e377c1039..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8238 zcmcgx?`{)E5O)fq!ceJHszg;pmjj7bs$&9tK*0%eY=@*xYzsR92_frzx3(9bcTc-} zi38#R`WF4rZ@d7{)fZ@Ib}x?4PMkQ{wiLyloj<>s{W~+;<>H&v$>$1u{cgKlEWK&e zN`?A%r5ulaX;wS`!uKCKBJvq$%N^ehSW~+42&i9>E9SUeX}+hP&U%u%nl?hgxb|GH zLoMIk|6;x+__-ghnQ!maM0DCufw`+w) zc%(am!_VW-H7j!bbM(Ijy#%2d4%fH9cC*ObK(uR~WTCcVw~Mil>8deP5Tct(-7ey2 zJn~~5oU2L^QmGkLl~6Omm1SC5j+w4*(I8Bvev(6iH|jzJYFTw?(6U2Ut^xaJV7a*& zaS!#B-5x~yP9JEuVpZRl`dYf1ET98Zcm7JHmj1@^`^5S{lyQQzgd}6LLflA;o~xPX z2Eh?&Q%)sJu%AwUOcVHUFnWDV$_!bxXAA~zlLptF2@~$5jTZ1YBp=h)9mo9qWT|Z_ zA|xXO{2&bc?)~GATMRpOAeI3!l@zP7rB76jB2a!RcV&)8N}A$Z|@^ zuY`tKJ`FDy#;<`@^z?Kez5=dJ#^?g!4FGOZXy%|sCT=n)8^AduQc3-j5!GM^&pSln 
zG=Qq?Kxkri$UV;R1S1QXP)Qs8IY;ZG2O2ZmyJcaql~%1PCglxxP@$z?qAlf>(=!1qLNs_jxhAyNP- z$`xG5g3lWzJWb&B0534r7&SI|0hG84_bFf&(sE~To=lU^Hdao64wB1S)Z}z% z`UkUj;dIuh8d81!18f=?C+@aZKY_;f)4wa-)-xJT1KES@GZSpI`GhLbVta>{(sensI#L>jgxJV2%i zWW@;COnf}oJ3XRbfiW((0Z4d|O_j3k+d>^NsSu=UNhfCxG-O_PEE$}9@a!{sXo_?- z8i02oO>8nWQZSqgR$FjQ24yl_ixMgcSjA2X&Kx0j1E!3oDg4LJ;)N~GNYMr)=fP;I za9$)edCeqk;rkCV-!bu-$8&m&v&9E5mrvUU!7F>vn08w%jPtF_Y1;DMfF; zWe_}io`%X(i}j1pX9=l~+NVdAz2H6rUC^3+186b-;s=PkBIJ2;)c+9^Grqq zddfmmu+ecf(T8Fb1oA5kDV%_apFpULL1-X}L(s{1nz%%P4R8hhStgmxI<{VNh}-m8 z)|>}h#eAb!+RX3m)Eo6mWyi4%m3U+)zfl4bgHXhj?LwvOV6b96yOc*}@$_}9@&FEJ zXunt<;KDFMkEKjCuFv(##vi%t2+gX?BCa8Q6Rp5&{7}g5n3+mwtQf!Q`Hh`YBVR5y z%K6>Wz-r8Lu2FdNLu8}%B5N}Z`!25()hcIT9*GTL;+4TOHR|k$?yN9l9!}5A)+(9QE{`T(OdM;}%nKf(Rz-rEp zEa$OqAv7$%N&SLXCzV^UBm)W+NxWCn~DxWr^b_1jAtUekt0V_L%HcEnK0m%mAKJ%z@ yR^PsZqcL^YdQ`(s+F1_$gAOU=NcdwZ33n_h;f*Cta^_fQ#1~6WxME4Cd-6X`t From 4ec0035ed593dd626d350f1c0fec768b176abf5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 17:56:18 +0200 Subject: [PATCH 059/202] =?UTF-8?q?=F0=9F=A6=AE=20hound=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- openpype/hosts/houdini/api/plugin.py | 3 +-- .../hosts/houdini/plugins/publish/validate_cop_output_node.py | 2 +- .../plugins/publish/validate_usd_layer_path_backslashes.py | 2 ++ 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 51476fef52..95e7add54f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -172,7 +172,7 @@ class HoudiniCreator(NewCreator): "houdini_cached_instances"): self.collection_shared_data["houdini_cached_instances"] = {} self.log.info("Caching instances for {}".format(self.identifier)) - self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E401 + self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E501 for instance in instances: created_instance = CreatedInstance.from_existing( read(instance), self @@ -182,7 +182,6 @@ class HoudiniCreator(NewCreator): def update_instances(self, update_list): for created_inst, _changes in update_list: instance_node = hou.node(created_inst.get("instance_node")) - current_data = read(instance_node) new_values = { key: new_value diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 600dad8161..1d0377c818 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -38,7 +38,7 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin): try: output_node = instance.data["output_node"] - except KeyError as e: + except KeyError: six.reraise( PublishValidationError, PublishValidationError( diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index 972ac59f49..a0e2302495 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -4,6 +4,8 @@ import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib from openpype.pipeline import PublishValidationError +import hou + class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): 
"""Validate USD loaded paths have no backslashes. From e57b932cf835887726e4711003b7459a0319540a Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 18:09:55 +0200 Subject: [PATCH 060/202] :recycle: move methods around --- openpype/hosts/houdini/api/pipeline.py | 28 -------------------------- openpype/hosts/houdini/api/plugin.py | 24 ++++++++++++++++++---- 2 files changed, 20 insertions(+), 32 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 689d4d711c..c1a5936415 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -430,32 +430,4 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): log.warning("%s - %s", instance_node.path(), exc) -def list_instances(creator_id=None): - """List all publish instances in the scene. - """ - instance_signature = { - "id": "pyblish.avalon.instance" - } - - return [ - i for i in lib.lsattrs(instance_signature) - if i.parm("creator_identifier").eval() == creator_id - ] - - -def remove_instance(instance): - """Remove specified instance from the scene. - - This is only removing `id` parameter so instance is no longer instance, - because it might contain valuable data for artist. - - """ - # Assume instance node is first node - instance_node = hou.node(instance.data.get("instance_node")) - to_delete = None - for parameter in instance_node.spareParms(): - if parameter.name() == "id" and \ - parameter.eval() == "pyblish.avalon.instance": - to_delete = parameter - instance_node.removeSpareParmTuple(to_delete) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 95e7add54f..ee508f0df4 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -13,8 +13,7 @@ from openpype.pipeline import ( CreatedInstance ) from openpype.lib import BoolDef -from openpype.hosts.houdini.api import list_instances, remove_instance -from .lib import imprint, read +from .lib import imprint, read, lsattr class OpenPypeCreatorError(CreatorError): @@ -167,7 +166,11 @@ class HoudiniCreator(NewCreator): "houdini_cached_instances", {}) instances = cached_instances.get(self.identifier) if not instances: - instances = list_instances(creator_id=self.identifier) + instances = [ + i for i in lsattr("id", "pyblish.avalon.instance") + if i.parm("creator_identifier").eval() == self.identifier + ] + if not self.collection_shared_data.get( "houdini_cached_instances"): self.collection_shared_data["houdini_cached_instances"] = {} @@ -194,8 +197,21 @@ class HoudiniCreator(NewCreator): ) def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, + because it might contain valuable data for artist. 
+ + """ for instance in instances: - remove_instance(instance) + instance_node = hou.node(instance.data.get("instance_node")) + to_delete = None + for parameter in instance_node.spareParms(): + if parameter.name() == "id" and \ + parameter.eval() == "pyblish.avalon.instance": + to_delete = parameter + instance_node.removeSpareParmTuple(to_delete) self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): From 7b5abe1770bc2736f0b8f09998b8a85889274e5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 21 Oct 2022 18:11:44 +0200 Subject: [PATCH 061/202] :rotating_light: remove empty lines --- openpype/hosts/houdini/api/pipeline.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index c1a5936415..88c9029141 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -428,6 +428,3 @@ def on_pyblish_instance_toggled(instance, new_value, old_value): instance_node.bypass(not new_value) except hou.PermissionError as exc: log.warning("%s - %s", instance_node.path(), exc) - - - From 7a2e6bdf780f50d2680edf770955ae2db1cff1cd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Mon, 24 Oct 2022 00:10:04 +0200 Subject: [PATCH 062/202] :bug: fix caching --- openpype/hosts/houdini/api/__init__.py | 6 +----- openpype/hosts/houdini/api/plugin.py | 29 +++++++++++++------------- 2 files changed, 16 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index f29df021e1..2663a55f6f 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,9 +1,7 @@ from .pipeline import ( HoudiniHost, ls, - containerise, - list_instances, - remove_instance + containerise ) from .plugin import ( @@ -24,8 +22,6 @@ __all__ = [ "ls", "containerise", - "list_instances", - "remove_instance", "Creator", diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index ee508f0df4..b7eda7f635 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -162,21 +162,22 @@ class HoudiniCreator(NewCreator): self.log.debug("missing lock pattern {}".format(name)) def collect_instances(self): - cached_instances = self.collection_shared_data.get( - "houdini_cached_instances", {}) - instances = cached_instances.get(self.identifier) - if not instances: - instances = [ - i for i in lsattr("id", "pyblish.avalon.instance") - if i.parm("creator_identifier").eval() == self.identifier - ] + # cache instances if missing + if self.collection_shared_data.get("houdini_cached_instances") is None: + self.log.info("Caching instances ...") + self.collection_shared_data["houdini_cached_instances"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.parm("creator_identifier").eval() + if creator_id not in self.collection_shared_data[ + "houdini_cached_instances"]: + self.collection_shared_data["houdini_cached_instances"][ + creator_id] = [i] + else: + self.collection_shared_data["houdini_cached_instances"][ + creator_id].append(i) - if not self.collection_shared_data.get( - "houdini_cached_instances"): - self.collection_shared_data["houdini_cached_instances"] = {} - self.log.info("Caching instances for {}".format(self.identifier)) - self.collection_shared_data["houdini_cached_instances"][self.identifier] = instances # noqa: E501 - for instance in instances: + for instance in 
self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self ) From c27f4cbbf4b671980759d8ae520b2fc724deb9cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 14:48:30 +0200 Subject: [PATCH 063/202] :art: workfile auto-creator --- openpype/hosts/houdini/api/plugin.py | 56 +++++++++----- .../houdini/plugins/create/create_workfile.py | 76 +++++++++++++++++++ .../plugins/publish/collect_current_file.py | 38 +++------- 3 files changed, 124 insertions(+), 46 deletions(-) create mode 100644 openpype/hosts/houdini/plugins/create/create_workfile.py diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index b7eda7f635..aae6d137ac 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -35,6 +35,9 @@ class Creator(LegacyCreator): when hovering over a node. The information is visible under the name of the node. + Deprecated: + This creator is deprecated and will be removed in future version. + """ defaults = ['Main'] @@ -91,12 +94,35 @@ class Creator(LegacyCreator): sys.exc_info()[2]) -@six.add_metaclass(ABCMeta) -class HoudiniCreator(NewCreator): - selected_nodes = [] +class HoudiniCreatorBase(object): + @staticmethod + def cache_instances(shared_data): + """Cache instances for Creators to shared data. + + Create `houdini_cached_instances` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + Args: + Dict[str, Any]: Shared data. + + Return: + Dict[str, Any]: Shared data dictionary. + + """ + if shared_data.get("houdini_cached_instances") is None: + shared_data["houdini_cached_instances"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = i.parm("creator_identifier").eval() + if creator_id not in shared_data["houdini_cached_instances"]: + shared_data["houdini_cached_instances"][creator_id] = [i] + else: + shared_data["houdini_cached_instances"][creator_id].append(i) # noqa + return shared_data @staticmethod - def _create_instance_node( + def create_instance_node( node_name, parent, node_type="geometry"): # type: (str, str, str) -> hou.Node @@ -117,6 +143,11 @@ class HoudiniCreator(NewCreator): instance_node.moveToGoodPosition() return instance_node + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator, HoudiniCreatorBase): + selected_nodes = [] + def create(self, subset_name, instance_data, pre_create_data): try: if pre_create_data.get("use_selection"): @@ -127,7 +158,7 @@ class HoudiniCreator(NewCreator): if node_type is None: node_type = "geometry" - instance_node = self._create_instance_node( + instance_node = self.create_instance_node( subset_name, "/out", node_type) instance_data["instance_node"] = instance_node.path() @@ -163,20 +194,7 @@ class HoudiniCreator(NewCreator): def collect_instances(self): # cache instances if missing - if self.collection_shared_data.get("houdini_cached_instances") is None: - self.log.info("Caching instances ...") - self.collection_shared_data["houdini_cached_instances"] = {} - cached_instances = lsattr("id", "pyblish.avalon.instance") - for i in cached_instances: - creator_id = i.parm("creator_identifier").eval() - if creator_id not in self.collection_shared_data[ - "houdini_cached_instances"]: - self.collection_shared_data["houdini_cached_instances"][ - creator_id] = [i] - else: - 
self.collection_shared_data["houdini_cached_instances"][ - creator_id].append(i) - + self.cache_instances(self.collection_shared_data) for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py new file mode 100644 index 0000000000..2a7cb14d68 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api.lib import read +from openpype.pipeline import CreatedInstance, AutoCreator +from openpype.pipeline.legacy_io import Session +from openpype.client import get_asset_by_name + + +class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.houdini.workfile" + label = "Workfile" + family = "workfile" + icon = "gears" + + default_variant = "Main" + + def create(self): + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + project_name = self.project_name + asset_name = Session["AVALON_ASSET"] + task_name = Session["AVALON_TASK"] + host_name = Session["AVALON_APP"] + + if current_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update( + self.get_dynamic_data( + variant, task_name, asset_doc, + project_name, host_name, current_instance) + ) + + new_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(new_instance) + + # Update instance context if is not the same + elif ( + current_instance["asset"] != asset_name + or current_instance["task"] != task_name + ): + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + current_instance["asset"] = asset_name + current_instance["task"] = task_name + current_instance["subset"] = subset_name + + def collect_instances(self): + self.cache_instances(self.collection_shared_data) + for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + pass + diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index 1383c274a2..9cca07fdc7 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -5,19 +5,20 @@ from openpype.pipeline import legacy_io import pyblish.api -class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): +class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): """Inject the current working file into context""" order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] + family = ["workfile"] - def process(self, context): + def 
process(self, instance): """Inject the current working file""" current_file = hou.hipFile.path() if not os.path.exists(current_file): - # By default Houdini will even point a new scene to a path. + # By default, Houdini will even point a new scene to a path. # However if the file is not saved at all and does not exist, # we assume the user never set it. filepath = "" @@ -34,43 +35,26 @@ class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): "saved correctly." ) - context.data["currentFile"] = current_file + instance.context.data["currentFile"] = current_file folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = legacy_io.Session["AVALON_TASK"] - - data = {} - - # create instance - instance = context.create_instance(name=filename) - subset = 'workfile' + task.capitalize() - - data.update({ - "subset": subset, - "asset": os.getenv("AVALON_ASSET", None), - "label": subset, - "publish": True, - "family": 'workfile', - "families": ['workfile'], + instance.data.update({ "setMembers": [current_file], - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'] + "frameStart": instance.context.data['frameStart'], + "frameEnd": instance.context.data['frameEnd'], + "handleStart": instance.context.data['handleStart'], + "handleEnd": instance.context.data['handleEnd'] }) - data['representations'] = [{ + instance.data['representations'] = [{ 'name': ext.lstrip("."), 'ext': ext.lstrip("."), 'files': file, "stagingDir": folder, }] - instance.data.update(data) - self.log.info('Collected instance: {}'.format(file)) self.log.info('Scene path: {}'.format(current_file)) self.log.info('staging Dir: {}'.format(folder)) - self.log.info('subset: {}'.format(subset)) From 5b154d7a19d66f2e6d5b4f8567f38b441eae9066 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 15:00:17 +0200 Subject: [PATCH 064/202] :bug: fix HDA creation --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- openpype/hosts/houdini/plugins/publish/extract_hda.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 67c05b1634..5bb5786a40 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -43,7 +43,7 @@ class CreateHDA(plugin.HoudiniCreator): # if we have `use selection` enabled, and we have some # selected nodes ... 
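            # `self._nodes` appears to be a leftover attribute from the legacy
            # creator and is presumably never populated by the new-style
            # HoudiniCreator; `selected_nodes` is the list filled from the
            # current Houdini selection when "use selection" is enabled, which
            # is why the collapse call below switches to it.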
subnet = parent_node.collapseIntoSubnet( - self._nodes, + self.selected_nodes, subnet_name="{}_subnet".format(node_name)) subnet.moveToGoodPosition() to_hda = subnet diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index a92d000457..8b97bf364f 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- import os - from pprint import pformat - import pyblish.api - from openpype.pipeline import publish +import hou class ExtractHDA(publish.Extractor): @@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance.data.get("members")[0] + hda_node = hou.node(instance.data.get("instance_node")) hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) From a8f1e95696b005cb8466e67ab67d176ac60b1f2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Mon, 24 Oct 2022 18:11:06 +0200 Subject: [PATCH 065/202] :bug: workfile instance changes are now persisted --- openpype/hosts/houdini/api/pipeline.py | 8 +-- .../houdini/plugins/create/create_workfile.py | 55 ++++++++++++------- 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 88c9029141..6106dd4a6f 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -136,7 +136,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): ) @staticmethod - def _create_context_node(): + def create_context_node(): """Helper for creating context holding node. 
Returns: @@ -151,20 +151,20 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx.setCreatorState("OpenPype") op_ctx.setComment("OpenPype node to hold context metadata") op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) - op_ctx.hide(True) + # op_ctx.hide(True) return op_ctx def update_context_data(self, data, changes): op_ctx = hou.node(CONTEXT_CONTAINER) if not op_ctx: - op_ctx = self._create_context_node() + op_ctx = self.create_context_node() lib.imprint(op_ctx, data) def get_context_data(self): op_ctx = hou.node(CONTEXT_CONTAINER) if not op_ctx: - op_ctx = self._create_context_node() + op_ctx = self.create_context_node() return lib.read(op_ctx) def save_file(self, dst_path=None): diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py index 2a7cb14d68..0c6d840810 100644 --- a/openpype/hosts/houdini/plugins/create/create_workfile.py +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -1,10 +1,12 @@ # -*- coding: utf-8 -*- """Creator plugin for creating workfiles.""" from openpype.hosts.houdini.api import plugin -from openpype.hosts.houdini.api.lib import read +from openpype.hosts.houdini.api.lib import read, imprint +from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER from openpype.pipeline import CreatedInstance, AutoCreator -from openpype.pipeline.legacy_io import Session +from openpype.pipeline import legacy_io from openpype.client import get_asset_by_name +import hou class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): @@ -12,7 +14,7 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): identifier = "io.openpype.creators.houdini.workfile" label = "Workfile" family = "workfile" - icon = "gears" + icon = "document" default_variant = "Main" @@ -25,9 +27,9 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): ), None) project_name = self.project_name - asset_name = Session["AVALON_ASSET"] - task_name = Session["AVALON_TASK"] - host_name = Session["AVALON_APP"] + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + host_name = legacy_io.Session["AVALON_APP"] if current_instance is None: asset_doc = get_asset_by_name(project_name, asset_name) @@ -44,17 +46,16 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): variant, task_name, asset_doc, project_name, host_name, current_instance) ) - - new_instance = CreatedInstance( + self.log.info("Auto-creating workfile instance...") + current_instance = CreatedInstance( self.family, subset_name, data, self ) - self._add_instance_to_context(new_instance) - - # Update instance context if is not the same + self._add_instance_to_context(current_instance) elif ( current_instance["asset"] != asset_name or current_instance["task"] != task_name ): + # Update instance context if is not the same asset_doc = get_asset_by_name(project_name, asset_name) subset_name = self.get_subset_name( variant, task_name, asset_doc, project_name, host_name @@ -63,14 +64,30 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): current_instance["task"] = task_name current_instance["subset"] = subset_name + # write workfile information to context container. 
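        # CONTEXT_CONTAINER refers to the hidden OpenPype context node built by
        # HoudiniHost.create_context_node(); imprint() stores the serialized
        # workfile instance on that node as spare parameters under the
        # "workfile" key used below, so the data persists with the hip file.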
+ op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + + workfile_data = {"workfile": current_instance.data_to_store()} + imprint(op_ctx, workfile_data) + def collect_instances(self): - self.cache_instances(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa - created_instance = CreatedInstance.from_existing( - read(instance), self - ) - self._add_instance_to_context(created_instance) + op_ctx = hou.node(CONTEXT_CONTAINER) + instance = read(op_ctx) + if not instance: + return + workfile = instance.get("workfile") + if not workfile: + return + created_instance = CreatedInstance.from_existing( + workfile, self + ) + self._add_instance_to_context(created_instance) def update_instances(self, update_list): - pass - + op_ctx = hou.node(CONTEXT_CONTAINER) + for created_inst, _changes in update_list: + if created_inst["creator_identifier"] == self.identifier: + workfile_data = {"workfile": created_inst.data_to_store()} + imprint(op_ctx, workfile_data, update=True) From 051189bbca25f08fa1a1403809e92b0a80d49e18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:36:09 +0200 Subject: [PATCH 066/202] :bug: fix creator id --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 5bb5786a40..590c8f97fd 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -11,7 +11,7 @@ from openpype.hosts.houdini.api import plugin class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - identifier = "hda" + identifier = "io.openpype.creators.houdini.hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" From 6db2c8e33f78d2e6751665c3e22bb8c91b4329ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:36:54 +0200 Subject: [PATCH 067/202] :recycle: refactor name, collect legacy subsets --- openpype/hosts/houdini/api/plugin.py | 31 ++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index aae6d137ac..4dc6641ac9 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -96,13 +96,15 @@ class Creator(LegacyCreator): class HoudiniCreatorBase(object): @staticmethod - def cache_instances(shared_data): + def cache_subsets(shared_data): """Cache instances for Creators to shared data. - Create `houdini_cached_instances` key when needed in shared data and + Create `houdini_cached_subsets` key when needed in shared data and fill it with all collected instances from the scene under its respective creator identifiers. + U + Args: Dict[str, Any]: Shared data. @@ -110,15 +112,26 @@ class HoudiniCreatorBase(object): Dict[str, Any]: Shared data dictionary. 
""" - if shared_data.get("houdini_cached_instances") is None: - shared_data["houdini_cached_instances"] = {} + if shared_data.get("houdini_cached_subsets") is None: + shared_data["houdini_cached_subsets"] = {} + if shared_data.get("houdini_cached_legacy_subsets") is None: + shared_data["houdini_cached_legacy_subsets"] = {} cached_instances = lsattr("id", "pyblish.avalon.instance") for i in cached_instances: + if not i.parm("creator_identifier"): + # we have legacy instance + family = i.parm("family").eval() + if family not in shared_data["houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][family] = [i] + else: + shared_data["houdini_cached_legacy_subsets"][family].append(i) + continue + creator_id = i.parm("creator_identifier").eval() - if creator_id not in shared_data["houdini_cached_instances"]: - shared_data["houdini_cached_instances"][creator_id] = [i] + if creator_id not in shared_data["houdini_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] else: - shared_data["houdini_cached_instances"][creator_id].append(i) # noqa + shared_data["houdini_cached_subsets"][creator_id].append(i) # noqa return shared_data @staticmethod @@ -194,8 +207,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing - self.cache_instances(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_instances"].get(self.identifier, []): # noqa + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data["houdini_cached_subsets"].get(self.identifier, []): # noqa created_instance = CreatedInstance.from_existing( read(instance), self ) From 0fa86d5ce4fd772dfa37fb54eea1dc438680a471 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:37:15 +0200 Subject: [PATCH 068/202] :bug: fix lost pointer issue --- openpype/hosts/houdini/api/lib.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 2452ceef62..13f5a62ec3 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -348,6 +348,9 @@ def imprint(node, data, update=False): else: for template in templates: parm_group.appendToFolder(parm_folder, template) + # this is needed because the pointer to folder + # is for some reason lost every call to `appendToFolder()` + parm_folder = parm_group.findFolder("Extra") node.setParmTemplateGroup(parm_group) From 1dcd49576b1c98d200c494fe4cd8658468bca4d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:37:37 +0200 Subject: [PATCH 069/202] :bug: hide context node by default --- openpype/hosts/houdini/api/pipeline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index 6106dd4a6f..b0791fcb6c 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -151,7 +151,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): op_ctx.setCreatorState("OpenPype") op_ctx.setComment("OpenPype node to hold context metadata") op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) - # op_ctx.hide(True) + op_ctx.hide(True) return op_ctx def update_context_data(self, data, changes): From 20d111d60a1c0ac431adfc8567eeac87679b144a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 16:38:02 +0200 Subject: 
[PATCH 070/202] :sparkles: add legacy subset converter --- .../houdini/plugins/create/convert_legacy.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 openpype/hosts/houdini/plugins/create/convert_legacy.py diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py new file mode 100644 index 0000000000..be7ef714ba --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.houdini.api.lib import imprint + + +class HoudiniLegacyConvertor(SubsetConvertorPlugin): + identifier = "io.openpype.creators.houdini.legacy" + family_to_id = { + "camera": "io.openpype.creators.houdini.camera", + "ass": "io.openpype.creators.houdini.ass", + "imagesequence": "io.openpype.creators.houdini.imagesequence", + "hda": "io.openpype.creators.houdini.hda", + "pointcache": "io.openpype.creators.houdini.pointcache", + "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy", + "redshift_rop": "io.openpype.creators.houdini.redshift_rop", + "usd": "io.openpype.creators.houdini.usd", + "usdrender": "io.openpype.creators.houdini.usdrender", + "vdbcache": "io.openpype.creators.houdini.vdbcache" + } + + def __init__(self, *args, **kwargs): + super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs) + self.legacy_subsets = {} + + def find_instances(self): + self.legacy_subsets = self.collection_shared_data.get( + "houdini_cached_legacy_subsets") + if not self.legacy_subsets: + return + self.add_convertor_item("Found {} incompatible subset{}.".format( + len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "") + ) + + def convert(self): + if not self.legacy_subsets: + return + + for family, subsets in self.legacy_subsets.items(): + if family in self.family_to_id: + for subset in subsets: + data = { + "creator_identifier": self.family_to_id[family], + "instance_node": subset.path() + } + print("Converting {} to {}".format( + subset.path(), self.family_to_id[family])) + imprint(subset, data) From 8a1040aa7495aa6c3578033c5f6bad0321ec209d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 18:26:36 +0200 Subject: [PATCH 071/202] :rotating_light: various :dog: fixes and docstrings --- openpype/hosts/houdini/api/plugin.py | 27 ++++++++++++------- .../houdini/plugins/create/convert_legacy.py | 27 +++++++++++++++++++ 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 4dc6641ac9..b5f79838d1 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -103,7 +103,9 @@ class HoudiniCreatorBase(object): fill it with all collected instances from the scene under its respective creator identifiers. - U + If legacy instances are detected in the scene, create + `houdini_cached_legacy_subsets` there and fill it with + all legacy subsets under family as a key. Args: Dict[str, Any]: Shared data. 
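For orientation, a minimal sketch of the shared-data layout that cache_subsets() builds and that the hunks below document; the creator identifiers and node paths are made up, and the real list values are hou.Node objects rather than strings:

collection_shared_data = {
    "houdini_cached_subsets": {
        # new-style instances, keyed by their creator_identifier parm
        "io.openpype.creators.houdini.pointcache": ["/out/pointcacheMain"],
    },
    "houdini_cached_legacy_subsets": {
        # nodes without a creator_identifier parm, keyed by family
        "pointcache": ["/out/legacyPointcache1"],
    },
}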
@@ -121,17 +123,21 @@ class HoudiniCreatorBase(object): if not i.parm("creator_identifier"): # we have legacy instance family = i.parm("family").eval() - if family not in shared_data["houdini_cached_legacy_subsets"]: - shared_data["houdini_cached_legacy_subsets"][family] = [i] + if family not in shared_data[ + "houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][ + family] = [i] else: - shared_data["houdini_cached_legacy_subsets"][family].append(i) + shared_data[ + "houdini_cached_legacy_subsets"][family].append(i) continue creator_id = i.parm("creator_identifier").eval() if creator_id not in shared_data["houdini_cached_subsets"]: shared_data["houdini_cached_subsets"][creator_id] = [i] else: - shared_data["houdini_cached_subsets"][creator_id].append(i) # noqa + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa return shared_data @staticmethod @@ -159,6 +165,7 @@ class HoudiniCreatorBase(object): @six.add_metaclass(ABCMeta) class HoudiniCreator(NewCreator, HoudiniCreatorBase): + """Base class for most of the Houdini creator plugins.""" selected_nodes = [] def create(self, subset_name, instance_data, pre_create_data): @@ -208,7 +215,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def collect_instances(self): # cache instances if missing self.cache_subsets(self.collection_shared_data) - for instance in self.collection_shared_data["houdini_cached_subsets"].get(self.identifier, []): # noqa + for instance in self.collection_shared_data[ + "houdini_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing( read(instance), self ) @@ -231,11 +239,10 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): def remove_instances(self, instances): """Remove specified instance from the scene. - This is only removing `id` parameter so instance is no longer - instance, - because it might contain valuable data for artist. + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. - """ + """ for instance in instances: instance_node = hou.node(instance.data.get("instance_node")) to_delete = None diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py index be7ef714ba..2f3d1ef708 100644 --- a/openpype/hosts/houdini/plugins/create/convert_legacy.py +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -1,9 +1,22 @@ # -*- coding: utf-8 -*- +"""Convertor for legacy Houdini subsets.""" from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin from openpype.hosts.houdini.api.lib import imprint class HoudiniLegacyConvertor(SubsetConvertorPlugin): + """Find and convert any legacy subsets in the scene. + + This Convertor will find all legacy subsets in the scene and will + transform them to the current system. Since the old subsets doesn't + retain any information about their original creators, the only mapping + we can do is based on their families. + + Its limitation is that you can have multiple creators creating subset + of the same family and there is no way to handle it. This code should + nevertheless cover all creators that came with OpenPype. + + """ identifier = "io.openpype.creators.houdini.legacy" family_to_id = { "camera": "io.openpype.creators.houdini.camera", @@ -23,6 +36,15 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): self.legacy_subsets = {} def find_instances(self): + """Find legacy subsets in the scene. 
+ + Legacy subsets are the ones that doesn't have `creator_identifier` + parameter on them. + + This is using cached entries done in + :py:meth:`~HoudiniCreatorBase.cache_subsets()` + + """ self.legacy_subsets = self.collection_shared_data.get( "houdini_cached_legacy_subsets") if not self.legacy_subsets: @@ -32,6 +54,11 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): ) def convert(self): + """Convert all legacy subsets to current. + + It is enough to add `creator_identifier` and `instance_node`. + + """ if not self.legacy_subsets: return From 4be13d4324cbf7efc9128cb613f4fe3456e1416e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Tue, 25 Oct 2022 22:55:09 +0200 Subject: [PATCH 072/202] :recycle: switch print for log --- openpype/hosts/houdini/plugins/create/convert_legacy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py index 2f3d1ef708..4b8041b4f5 100644 --- a/openpype/hosts/houdini/plugins/create/convert_legacy.py +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -69,6 +69,6 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin): "creator_identifier": self.family_to_id[family], "instance_node": subset.path() } - print("Converting {} to {}".format( + self.log.info("Converting {} to {}".format( subset.path(), self.family_to_id[family])) imprint(subset, data) From 00c2ac36c5c90181db330fba8f10ca6b094c96db Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 31 Oct 2022 11:50:11 +0100 Subject: [PATCH 073/202] Fix enable state of "no registered families" item --- openpype/tools/creator/model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/creator/model.py b/openpype/tools/creator/model.py index d3d60b96f2..307993103b 100644 --- a/openpype/tools/creator/model.py +++ b/openpype/tools/creator/model.py @@ -36,7 +36,7 @@ class CreatorsModel(QtGui.QStandardItemModel): if not items: item = QtGui.QStandardItem("No registered families") item.setEnabled(False) - item.setData(QtCore.Qt.ItemIsEnabled, False) + item.setData(False, QtCore.Qt.ItemIsEnabled) items.append(item) self.invisibleRootItem().appendRows(items) From 7cf4e085f7c00ff8a9af2fdf538d7b0aed88f566 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:19:02 +0100 Subject: [PATCH 074/202] handle more types --- openpype/lib/transcoding.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index e736ba8ef0..4fc3a7ce94 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -111,6 +111,7 @@ def get_oiio_info_for_input(filepath, logger=None): class RationalToInt: """Rational value stored as division of 2 integers using string.""" + def __init__(self, string_value): parts = string_value.split("/") top = float(parts[0]) @@ -157,16 +158,16 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "int": return int(value) - if value_type == "float": + if value_type in ("float", "double"): return float(value) # Vectors will probably have more types - if value_type in ("vec2f", "float2"): + if value_type in ("vec2f", "float2", "float2d"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 # - are returned as list of lists - if value_type == "matrix": + if value_type in ("matrix", "matrixd"): output = [] current_index = -1 parts = 
value.split(",") @@ -198,7 +199,7 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "rational2i": return RationalToInt(value) - if value_type == "vector": + if value_type in ("vector", "vectord"): parts = [part.strip() for part in value.split(",")] output = [] for part in parts: From 7aca8136f5ba0ab22fd0e6084d5cc2901ac791a1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:19:32 +0100 Subject: [PATCH 075/202] 'get_oiio_info_for_input' can return information about all subimages --- openpype/lib/transcoding.py | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 4fc3a7ce94..9d87818508 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -77,26 +77,38 @@ def get_transcode_temp_directory(): ) -def get_oiio_info_for_input(filepath, logger=None): +def get_oiio_info_for_input(filepath, logger=None, subimages=False): """Call oiiotool to get information about input and return stdout. Stdout should contain xml format string. """ args = [ - get_oiio_tools_path(), "--info", "-v", "-i:infoformat=xml", filepath + get_oiio_tools_path(), + "--info", + "-v" ] + if subimages: + args.append("-a") + + args.extend(["-i:infoformat=xml", filepath]) + output = run_subprocess(args, logger=logger) output = output.replace("\r\n", "\n") xml_started = False + subimages = [] lines = [] for line in output.split("\n"): if not xml_started: if not line.startswith("<"): continue xml_started = True + if xml_started: lines.append(line) + if line == "": + subimages.append(lines) + lines = [] if not xml_started: raise ValueError( @@ -105,8 +117,14 @@ def get_oiio_info_for_input(filepath, logger=None): ) ) - xml_text = "\n".join(lines) - return parse_oiio_xml_output(xml_text, logger=logger) + output = [] + for subimage in subimages: + xml_text = "\n".join(subimage) + output.append(parse_oiio_xml_output(xml_text, logger=logger)) + + if subimages: + return output + return output[0] class RationalToInt: From 61d9657ce16e9eb3b4a434368cb316d2bf8ac05a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 12:20:58 +0100 Subject: [PATCH 076/202] subimages are reason for conversion and skip definition of input channels if there are subimages --- openpype/lib/transcoding.py | 39 +++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 9d87818508..32c71fee7e 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -399,6 +399,10 @@ def should_convert_for_ffmpeg(src_filepath): if not input_info: return None + subimages = input_info.get("subimages") + if subimages is not None and subimages > 1: + return True + # Check compression compression = input_info["attribs"].get("compression") if compression in ("dwaa", "dwab"): @@ -507,13 +511,23 @@ def convert_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) - oiio_cmd.extend([ + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - "-i:ch={}".format(input_channels_str), first_input_path, + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + + oiio_cmd.extend([ + input_arg, first_input_path, # Tell oiiotool 
which channels should be put to top stack (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + # TODO we should look for all subimages and try (somehow) find the + # best candidate for output + "--subimage", "0" ]) # Add frame definitions to arguments @@ -631,6 +645,15 @@ def convert_input_paths_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: + # Tell oiiotool which channels should be loaded + # - other channels are not loaded to memory so helps to avoid memory + # leak issues + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + for input_path in input_paths: # Prepare subprocess arguments oiio_cmd = [ @@ -644,13 +667,13 @@ def convert_input_paths_for_ffmpeg( oiio_cmd.extend(["--compression", compression]) oiio_cmd.extend([ - # Tell oiiotool which channels should be loaded - # - other channels are not loaded to memory so helps to - # avoid memory leak issues - "-i:ch={}".format(input_channels_str), input_path, + input_arg, input_path, # Tell oiiotool which channels should be put to top stack # (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + # TODO we should look for all subimages and try (somehow) find the + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From f437ce7c983cd30a37c3ed697e73d670a79fa87f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:37:26 +0100 Subject: [PATCH 077/202] fix variable names --- openpype/lib/transcoding.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 32c71fee7e..1ab3cb4081 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -96,7 +96,7 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): output = output.replace("\r\n", "\n") xml_started = False - subimages = [] + subimages_lines = [] lines = [] for line in output.split("\n"): if not xml_started: @@ -107,7 +107,7 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): if xml_started: lines.append(line) if line == "": - subimages.append(lines) + subimages_lines.append(lines) lines = [] if not xml_started: @@ -118,8 +118,8 @@ def get_oiio_info_for_input(filepath, logger=None, subimages=False): ) output = [] - for subimage in subimages: - xml_text = "\n".join(subimage) + for subimage_lines in subimages_lines: + xml_text = "\n".join(subimage_lines) output.append(parse_oiio_xml_output(xml_text, logger=logger)) if subimages: @@ -651,7 +651,7 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - # - this option is crashing if used on multipart/subimages exrs + # - this option is crashing if used on multipart exrs input_arg += ":ch={}".format(input_channels_str) for input_path in input_paths: From 560f327de1cbbff29db576c382f2191844855338 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:38:57 +0100 Subject: [PATCH 078/202] comment out subimage --- openpype/lib/transcoding.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 1ab3cb4081..af40fa752c 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py 
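For orientation, this is roughly the argument list that convert_for_ffmpeg() assembles for a single-part exr; the hunks below, and the later patches in this series, toggle the ":ch=" input option and the "--subimage" flag when multi-part (subimage) exrs are detected. The oiiotool location, paths and channel names here are illustrative only:

oiio_cmd = [
    "/path/to/oiiotool",
    "--compression", "none",
    # load only the review channels of a single-part exr
    "-i:ch=R,G,B,A", "/work/render/beauty.1001.exr",
    # shuffle them to the top of the stack for output
    "--ch", "R=R,G=G,B=B,A=A",
    "--subimage", "0",
    # frame-range, "--attrib" copies and the "-o" output path follow here
]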
@@ -524,10 +524,10 @@ def convert_for_ffmpeg( input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) "--ch", channels_arg, + # WARNING: This is commented out because ffmpeg won't be able to + # render proper output when only one subimage is outputed with oiio # Use first subimage - # TODO we should look for all subimages and try (somehow) find the - # best candidate for output - "--subimage", "0" + # "--subimage", "0" ]) # Add frame definitions to arguments @@ -671,9 +671,10 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be put to top stack # (and output) "--ch", channels_arg, + # WARNING: This is commented out because ffmpeg won't be able to + # render proper output when only one subimage is outputed with oiio # Use first subimage - # TODO we should look for all subimages and try (somehow) find the - "--subimage", "0" + # "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From ebbf827f0866b05d3d0915a6cb7f86f1bf814fa6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 2 Nov 2022 16:43:11 +0100 Subject: [PATCH 079/202] fix line length --- openpype/lib/transcoding.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index af40fa752c..5a57026496 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -672,7 +672,8 @@ def convert_input_paths_for_ffmpeg( # (and output) "--ch", channels_arg, # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed with oiio + # render proper output when only one subimage is outputed + # with oiiotool # Use first subimage # "--subimage", "0" ]) From 372c6d89c37e1d67aea8caab69667d55e5b6f34d Mon Sep 17 00:00:00 2001 From: 2-REC Date: Fri, 4 Nov 2022 15:26:33 +0700 Subject: [PATCH 080/202] Setting from other plugin --- .../publish/validate_texture_workfiles.py | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index 56ea82f6b6..a25b80438d 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,5 +1,7 @@ +import os import pyblish.api +from openpype.settings import get_project_settings from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -18,23 +20,40 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True + #TODO(2-rec): remove/change comment # from presets main_workfile_extensions = ['mra'] def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] - if ext not in self.main_workfile_extensions: + main_workfile_extensions = self.get_main_workfile_extensions() + if ext not in main_workfile_extensions: self.log.warning("Only secondary workfile present!") return if not instance.data.get("resources"): msg = "No secondary workfile present for workfile '{}'". 
\ format(instance.data["name"]) - ext = self.main_workfile_extensions[0] + ext = main_workfile_extensions[0] formatting_data = {"file_name": instance.data["name"], "extension": ext} raise PublishXmlValidationError(self, msg, formatting_data=formatting_data ) + + @classmethod + def get_main_workfile_extensions(cls): + project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) + + #TODO: find better way? (depends on other plugin) + try: + extensions = (project_settings["standalonepublisher"] + ["publish"] + ["CollectTextures"] + ["main_workfile_extensions"]) + except KeyError: + extensions = cls.main_workfile_extensions + + return extensions From 5c37d91138332442fa1d746003f8b16a7e623f2e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 16 Nov 2022 17:20:44 +0100 Subject: [PATCH 081/202] uncomment subimages because multipart exr is created which actually can't ffmpeg handle --- openpype/lib/transcoding.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 5a57026496..6f571ea522 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -476,7 +476,7 @@ def convert_for_ffmpeg( if input_frame_start is not None and input_frame_end is not None: is_sequence = int(input_frame_end) != int(input_frame_start) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -524,10 +524,8 @@ def convert_for_ffmpeg( input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) "--ch", channels_arg, - # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed with oiio # Use first subimage - # "--subimage", "0" + "--subimage", "0" ]) # Add frame definitions to arguments @@ -621,7 +619,7 @@ def convert_input_paths_for_ffmpeg( " \".exr\" extension. Got \"{}\"." 
).format(ext)) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -639,6 +637,7 @@ def convert_input_paths_for_ffmpeg( red, green, blue, alpha = review_channels input_channels = [red, green, blue] + # TODO find subimage inder where rgba is available for multipart exrs channels_arg = "R={},G={},B={}".format(red, green, blue) if alpha is not None: channels_arg += ",A={}".format(alpha) @@ -671,11 +670,8 @@ def convert_input_paths_for_ffmpeg( # Tell oiiotool which channels should be put to top stack # (and output) "--ch", channels_arg, - # WARNING: This is commented out because ffmpeg won't be able to - # render proper output when only one subimage is outputed - # with oiiotool # Use first subimage - # "--subimage", "0" + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): From 64a1e55170153504fcb4ff892a8030bc14ef034f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 18 Nov 2022 15:59:58 +0100 Subject: [PATCH 082/202] enhance speed of collect audio by converting it to context plugin --- openpype/plugins/publish/collect_audio.py | 175 +++++++++++++++------- 1 file changed, 124 insertions(+), 51 deletions(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 7d53b24e54..db567f8b8f 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -1,21 +1,27 @@ +import collections import pyblish.api from openpype.client import ( - get_last_version_by_subset_name, + get_assets, + get_subsets, + get_last_versions, get_representations, ) -from openpype.pipeline import ( - legacy_io, - get_representation_path, -) +from openpype.pipeline import get_representation_path_with_anatomy -class CollectAudio(pyblish.api.InstancePlugin): +class CollectAudio(pyblish.api.ContextPlugin): """Collect asset's last published audio. The audio subset name searched for is defined in: project settings > Collect Audio + + Note: + The plugin was instance plugin but because of so much queries the + plugin was slowing down whole collection phase a lot thus was + converted to context plugin which requires only 4 queries top. """ + label = "Collect Asset Audio" order = pyblish.api.CollectorOrder + 0.1 families = ["review"] @@ -39,67 +45,134 @@ class CollectAudio(pyblish.api.InstancePlugin): audio_subset_name = "audioMain" - def process(self, instance): - if instance.data.get("audio"): - self.log.info( - "Skipping Audio collecion. It is already collected" - ) + def process(self, context): + # Fake filtering by family inside context plugin + filtered_instances = [] + for instance in pyblish.api.instances_by_plugin( + context, self.__class__ + ): + # Skip instances that already have audio filled + if instance.data.get("audio"): + self.log.info( + "Skipping Audio collecion. It is already collected" + ) + continue + filtered_instances.append(instance) + + # Skip if none of instances remained + if not filtered_instances: return # Add audio to instance if exists. 
+ instances_by_asset_name = collections.defaultdict(list) + for instance in filtered_instances: + asset_name = instance.data["asset"] + instances_by_asset_name[asset_name].append(instance) + + asset_names = set(instances_by_asset_name.keys()) self.log.info(( - "Searching for audio subset '{subset}'" - " in asset '{asset}'" + "Searching for audio subset '{subset}' in assets {assets}" ).format( subset=self.audio_subset_name, - asset=instance.data["asset"] + assets=", ".join([ + '"{}"'.format(asset_name) + for asset_name in asset_names + ]) )) - repre_doc = self._get_repre_doc(instance) + # Query all required documents + project_name = context.data["projectName"] + anatomy = context.data["anatomy"] + repre_docs_by_asset_names = self.query_representations( + project_name, asset_names) - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.info("Audio Data added to instance ...") + for asset_name, instances in instances_by_asset_name.items(): + repre_docs = repre_docs_by_asset_names[asset_name] + if not repre_docs: + continue - def _get_repre_doc(self, instance): - cache = instance.context.data.get("__cache_asset_audio") - if cache is None: - cache = {} - instance.context.data["__cache_asset_audio"] = cache - asset_name = instance.data["asset"] + repre_doc = repre_docs[0] + repre_path = get_representation_path_with_anatomy( + repre_doc, anatomy + ) + for instance in instances: + instance.data["audio"] = [{ + "offset": 0, + "filename": repre_path + }] + self.log.info("Audio Data added to instance ...") - # first try to get it from cache - if asset_name in cache: - return cache[asset_name] + def query_representations(self, project_name, asset_names): + """Query representations related to audio subsets for passed assets. - project_name = legacy_io.active_project() + Args: + project_name (str): Project in which we're looking for all + entities. + asset_names (Iterable[str]): Asset names where to look for audio + subsets and their representations. - # Find latest versions document - last_version_doc = get_last_version_by_subset_name( + Returns: + collections.defaultdict[str, List[Dict[Str, Any]]]: Representations + related to audio subsets by asset name. 
+ """ + + output = collections.defaultdict(list) + # Query asset documents + asset_docs = get_assets( project_name, - self.audio_subset_name, - asset_name=asset_name, + asset_names=asset_names, fields=["_id"] ) - repre_doc = None - if last_version_doc: - # Try to find it's representation (Expected there is only one) - repre_docs = list(get_representations( - project_name, version_ids=[last_version_doc["_id"]] - )) - if not repre_docs: - self.log.warning( - "Version document does not contain any representations" - ) - else: - repre_doc = repre_docs[0] + asset_id_by_name = {} + for asset_doc in asset_docs: + asset_id_by_name[asset_doc["name"]] = asset_doc["_id"] + asset_ids = set(asset_id_by_name.values()) - # update cache - cache[asset_name] = repre_doc + # Query subsets with name define by 'audio_subset_name' attr + # - one or none subsets with the name should be available on an asset + subset_docs = get_subsets( + project_name, + subset_names=[self.audio_subset_name], + asset_ids=asset_ids, + fields=["_id", "parent"] + ) + subset_id_by_asset_id = {} + for subset_doc in subset_docs: + asset_id = subset_doc["parent"] + subset_id_by_asset_id[asset_id] = subset_doc["_id"] - return repre_doc + subset_ids = set(subset_id_by_asset_id.values()) + if not subset_ids: + return output + + # Find all latest versions for the subsets + version_docs_by_subset_id = get_last_versions( + project_name, subset_ids=subset_ids, fields=["_id", "parent"] + ) + version_id_by_subset_id = { + subset_id: version_doc["_id"] + for subset_id, version_doc in version_docs_by_subset_id.items() + } + version_ids = set(version_id_by_subset_id.values()) + if not version_ids: + return output + + # Find representations under latest versions of audio subsets + repre_docs = get_representations( + project_name, version_ids=version_ids + ) + repre_docs_by_version_id = collections.defaultdict(list) + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + repre_docs_by_version_id[version_id].append(repre_doc) + + if not repre_docs_by_version_id: + return output + + for asset_name in asset_names: + asset_id = asset_id_by_name.get(asset_name) + subset_id = subset_id_by_asset_id.get(asset_id) + version_id = version_id_by_subset_id.get(subset_id) + output[asset_name] = repre_docs_by_version_id[version_id] + return output From 110cd58fd168861b91cbdee366324edbe21c8917 Mon Sep 17 00:00:00 2001 From: Joseff Date: Tue, 22 Nov 2022 18:40:42 +0100 Subject: [PATCH 083/202] Update the UOpenPypePublishInstance to use UDataAsset --- .../Private/OpenPypePublishInstance.cpp | 184 ++++++++++------- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 54 ++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- .../Private/OpenPypePublishInstance.cpp | 185 +++++++++++------- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 61 +++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- 8 files changed, 330 insertions(+), 174 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..72dc617699 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,147 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" 
+#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split("/" + GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + UObject* Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! 
Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + AssetDataInternal.Emplace(Asset); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } +} + +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) +{ + if (Cast(InAssetData.GetAsset()) == nullptr) + { + if (AssetDataInternal.Contains(NULL)) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + AssetDataInternal.Remove(NULL); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(NULL); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; - if (assetDir.StartsWith(*selfDir)) + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" 
+ ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) -{ - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) - { - // exclude self - if (assetFName != "AssetContainer") - { - - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); - } - } -} +#endif diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..54e24e03d7 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ 
b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -5,17 +5,51 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); - + + UPROPERTY(VisibleAnywhere,BlueprintReadOnly) + TSet AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + */ UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) + TSet AssetDataExternal; + + /** + * Function for returning all the assets in the container. + * + * @return Returns all the internal and externally added assets into one set (TSet). + */ + UFUNCTION(BlueprintCallable, Category = Python) + TSet GetAllAssets() const + { + return AssetDataInternal.Union(AssetDataExternal); + }; + + private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const UObject* InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0b..9a89c3868b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,148 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "AssetToolsModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to 
prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split(GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + const TObjectPtr Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (!IsValid(Asset)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! 
Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } + + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; + + if (result) + { + AssetDataInternal.Emplace(Asset); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } +} + +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) +{ + if (Cast(InAssetData.GetAsset()) == nullptr) + { + if (AssetDataInternal.Contains(nullptr)) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; - if (assetDir.StartsWith(*selfDir)) + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" 
+ ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + + // Check for duplicated assets + for (const TObjectPtr& Asset : AssetDataInternal) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } + + // Check if no UOpenPypePublishInstance type assets are included + for (const TObjectPtr& Asset : AssetDataExternal) + { + if (Cast(Asset) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) -{ - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) - { - // exclude self - if (assetFName != "AssetContainer") - { - - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); - } - } -} +#endif diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c689..9b26da7fa4 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7..97df757acd 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ 
b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,21 +1,62 @@ #pragma once +#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); - + + UPROPERTY(VisibleAnywhere,BlueprintReadOnly) + TSet> AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + */ UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; + + /** + * Function for returning all the assets in the container. + * + * @return Returns all the internal and externally added assets into one set (TSet). + */ + UFUNCTION(BlueprintCallable, Category = Python) + TSet GetAllAssets() const + { + TSet> Unionized = AssetDataInternal.Union(AssetDataExternal); + + TSet ResultSet; + + for (auto& Asset : Unionized) + ResultSet.Add(Asset.Get()); + + return ResultSet; + } + private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const TObjectPtr& InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13e..7d2c77fe6e 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; From 56d5bf89212ef8f9c65bc782a11600ea62291e8a Mon Sep 17 00:00:00 2001 From: Joseff Date: Wed, 23 Nov 2022 10:58:11 +0100 Subject: [PATCH 084/202] Changed the member variables to use TSoftObjectPtr<> TSets now use TSoftObjectPtr<> for referencing the assets, which can reduce memory usage. 
--- .../Private/OpenPypePublishInstance.cpp | 25 +++--- .../OpenPype/Public/OpenPypePublishInstance.h | 84 +++++++++++++++---- .../Private/OpenPypePublishInstance.cpp | 15 ++-- .../OpenPype/Public/OpenPypePublishInstance.h | 79 ++++++++++++----- 4 files changed, 146 insertions(+), 57 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 72dc617699..ed81104c05 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -15,6 +15,9 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< FAssetRegistryModule>("AssetRegistry"); + const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked( + "PropertyEditor"); + FString Left, Right; GetPathName().Split("/" + GetName(), &Left, &Right); @@ -33,7 +36,6 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); - } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -53,9 +55,11 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (result) { - AssetDataInternal.Emplace(Asset); - UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + if (AssetDataInternal.Emplace(Asset).IsValidId()) + { + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } } } @@ -63,14 +67,14 @@ void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { if (Cast(InAssetData.GetAsset()) == nullptr) { - if (AssetDataInternal.Contains(NULL)) + if (AssetDataInternal.Contains(nullptr)) { - AssetDataInternal.Remove(NULL); + AssetDataInternal.Remove(nullptr); REMOVE_INVALID_ENTRIES(AssetDataInternal) } else { - AssetDataExternal.Remove(NULL); + AssetDataExternal.Remove(nullptr); REMOVE_INVALID_ENTRIES(AssetDataExternal) } } @@ -121,22 +125,21 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( UOpenPypePublishInstance, AssetDataExternal)) { - // Check for duplicated assets for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { AssetDataExternal.Remove(Asset); - return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + return SendNotification( + "You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); } - } // Check if no UOpenPypePublishInstance type assets are included for (const auto& Asset : AssetDataExternal) { - if (Cast(Asset) != nullptr) + if (Cast(Asset.Get()) != nullptr) { AssetDataExternal.Remove(Asset); return SendNotification("You are not allowed to add publish instances!"); diff --git 
a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 54e24e03d7..0e946fb039 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -11,32 +11,80 @@ class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset public: - UPROPERTY(VisibleAnywhere,BlueprintReadOnly) - TSet AssetDataInternal; + /** + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + +private: + + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; /** * This property allows exposing the array to include other assets from any other directory than what it's currently * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! */ - UPROPERTY(EditAnywhere, BlueprintReadOnly) + UPROPERTY(EditAnywhere, Category = "Assets") bool bAddExternalAssets = false; - UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) - TSet AssetDataExternal; + UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets") + TSet> AssetDataExternal; - /** - * Function for returning all the assets in the container. - * - * @return Returns all the internal and externally added assets into one set (TSet). 
- */ - UFUNCTION(BlueprintCallable, Category = Python) - TSet GetAllAssets() const - { - return AssetDataInternal.Union(AssetDataExternal); - }; - - -private: void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 9a89c3868b..c2c7e249c3 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -35,6 +35,7 @@ UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& Obj AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + } void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) @@ -54,9 +55,11 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (result) { - AssetDataInternal.Emplace(Asset); - UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), - *this->GetName(), *Asset->GetName()); + if (AssetDataInternal.Emplace(Asset).IsValidId()) + { + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); + } } } @@ -124,7 +127,7 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro { // Check for duplicated assets - for (const TObjectPtr& Asset : AssetDataInternal) + for (const auto& Asset : AssetDataInternal) { if (AssetDataExternal.Contains(Asset)) { @@ -135,9 +138,9 @@ void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& Pro } // Check if no UOpenPypePublishInstance type assets are included - for (const TObjectPtr& Asset : AssetDataExternal) + for (const auto& Asset : AssetDataExternal) { - if (Cast(Asset) != nullptr) + if (Cast(Asset.Get()) != nullptr) { AssetDataExternal.Remove(Asset); return SendNotification("You are not allowed to add publish instances!"); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 97df757acd..2f066bd94b 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -9,41 +9,78 @@ UCLASS(Blueprintable) class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { GENERATED_UCLASS_BODY() - public: - - UPROPERTY(VisibleAnywhere,BlueprintReadOnly) - TSet> AssetDataInternal; - /** - * This property allows exposing the array to include other assets from any other directory than what it's currently - * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. 
Seems like an issue in UE5 */ - UPROPERTY(EditAnywhere, BlueprintReadOnly) - bool bAddExternalAssets = false; + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; - UPROPERTY(EditAnywhere, BlueprintReadOnly, meta=(EditCondition="bAddExternalAssets")) - TSet> AssetDataExternal; + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } /** - * Function for returning all the assets in the container. + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. * - * @return Returns all the internal and externally added assets into one set (TSet). + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! */ - UFUNCTION(BlueprintCallable, Category = Python) + UFUNCTION(BlueprintCallable, BlueprintPure) TSet GetAllAssets() const { - TSet> Unionized = AssetDataInternal.Union(AssetDataExternal); - - TSet ResultSet; + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; - for (auto& Asset : Unionized) - ResultSet.Add(Asset.Get()); + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); return ResultSet; } private: + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows the instance to include other assets from any other directory than what it's currently + * monitoring. + * @attention assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category="Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; void OnAssetCreated(const FAssetData& InAssetData); void OnAssetRemoved(const FAssetData& InAssetData); @@ -52,11 +89,9 @@ private: bool IsUnderSameDir(const TObjectPtr& InAsset) const; #ifdef WITH_EDITOR - + void SendNotification(const FString& Text) const; virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; #endif - }; - From ec0f6986158b2eab312ade16430c0eb969aa0e68 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 23 Nov 2022 14:52:31 +0100 Subject: [PATCH 085/202] celaction: fixing host --- openpype/hosts/celaction/__init__.py | 10 +++ openpype/hosts/celaction/addon.py | 24 ++++++ .../hooks/pre_celaction_registers.py | 73 ++++++++++++------- 3 files changed, 80 insertions(+), 27 deletions(-) create mode 100644 openpype/hosts/celaction/addon.py diff --git a/openpype/hosts/celaction/__init__.py b/openpype/hosts/celaction/__init__.py index e69de29bb2..8983d48d7d 100644 --- a/openpype/hosts/celaction/__init__.py +++ b/openpype/hosts/celaction/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + CELACTION_ROOT_DIR, + CelactionAddon, +) + + +__all__ = ( + "CELACTION_ROOT_DIR", + "CelactionAddon", +) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py new file mode 100644 index 0000000000..c6d30935a1 --- /dev/null +++ b/openpype/hosts/celaction/addon.py @@ -0,0 +1,24 @@ +import os +from openpype.modules import OpenPypeModule, IHostAddon + +CELACTION_ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class CelactionAddon(OpenPypeModule, IHostAddon): + name = "celaction" + host_name = "celaction" + + def initialize(self, module_settings): + self.enabled = True + + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".scn"] diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_registers.py index e49e66f163..84ac3d130a 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_registers.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_registers.py @@ -27,17 +27,24 @@ class CelactionPrelaunchHook(PreLaunchHook): app = "celaction_publish" # setting output parameters - path = r"Software\CelAction\CelAction2D\User Settings" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) + path_user_settings = "\\".join([ + "Software", "CelAction", "CelAction2D", "User Settings" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings) hKey = winreg.OpenKey( - winreg.HKEY_CURRENT_USER, - "Software\\CelAction\\CelAction2D\\User Settings", 0, - winreg.KEY_ALL_ACCESS) + winreg.HKEY_CURRENT_USER, path_user_settings, 0, + winreg.KEY_ALL_ACCESS + ) - # TODO: this will need to be checked more thoroughly - pype_exe = os.getenv("OPENPYPE_EXECUTABLE") + openpype_executable = os.getenv("OPENPYPE_EXECUTABLE") - winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe) + winreg.SetValueEx( + hKey, + "SubmitAppTitle", + 0, + winreg.REG_SZ, + openpype_executable + ) parameters = [ "launch", @@ -53,33 +60,45 @@ class CelactionPrelaunchHook(PreLaunchHook): "--resolutionHeight *Y*", # "--programDir \"'*PROGPATH*'\"" ] - winreg.SetValueEx(hKey, 
"SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters)) + winreg.SetValueEx( + hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, + " ".join(parameters) + ) # setting resolution parameters - path = r"Software\CelAction\CelAction2D\User Settings\Dialogs" - path += r"\SubmitOutput" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + path_submit = "\\".join([ + path_user_settings, "Dialogs", "SubmitOutput" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_submit, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) # making sure message dialogs don't appear when overwriting - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\OverwriteScene" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + path_overwrite_scene = "\\".join([ + path_user_settings, "Messages", "OverwriteScene" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6) winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\SceneSaved" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) + # set scane as not saved + path_scene_saved = "\\".join([ + path_user_settings, "Messages", "SceneSaved" + ]) + winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved) + hKey = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, path_scene_saved, 0, + winreg.KEY_ALL_ACCESS + ) winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1) winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) @@ -90,11 +109,11 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - pype_celaction_dir = os.path.dirname(os.path.dirname( + openpype_celaction_dir = os.path.dirname(os.path.dirname( os.path.abspath(celaction.__file__) )) template_path = os.path.join( - pype_celaction_dir, + openpype_celaction_dir, "resources", "celaction_template_scene.scn" ) From 27e4985d488401e944a44c7b705f36f407f985e7 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:04:54 +0000 Subject: [PATCH 086/202] Extract Alembic animation from Blender --- .../plugins/publish/extract_abc_animation.py | 75 +++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 openpype/hosts/blender/plugins/publish/extract_abc_animation.py diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py new file mode 100644 index 0000000000..80f7a4ba58 --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -0,0 +1,75 @@ +import os + +import bpy + +from openpype.pipeline import publish +from openpype.hosts.blender.api import plugin +from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY + + +class ExtractAnimationABC(publish.Extractor): + """Extract as ABC.""" + + label = "Extract Animation 
ABC" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.abc" + filepath = os.path.join(stagingdir, filename) + + context = bpy.context + scene = context.scene + view_layer = context.view_layer + + # Perform extraction + self.log.info("Performing extraction..") + + plugin.deselect_all() + + selected = [] + asset_group = None + + objects = [] + for obj in instance: + if isinstance(obj, bpy.types.Collection): + for child in obj.all_objects: + objects.append(child) + for obj in objects: + children = [o for o in bpy.data.objects if o.parent == obj] + for child in children: + objects.append(child) + + for obj in objects: + obj.select_set(True) + selected.append(obj) + + context = plugin.create_blender_context( + active=asset_group, selected=selected) + + # We export the abc + bpy.ops.wm.alembic_export( + context, + filepath=filepath, + selected=True, + flatten=False + ) + + plugin.deselect_all() + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, representation) From 36effdce72a3f6d4e82ce9150f4c61425af2fa49 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:12:34 +0000 Subject: [PATCH 087/202] Hound fixes --- .../hosts/blender/plugins/publish/extract_abc_animation.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py index 80f7a4ba58..e141ccaa44 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -4,7 +4,6 @@ import bpy from openpype.pipeline import publish from openpype.hosts.blender.api import plugin -from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY class ExtractAnimationABC(publish.Extractor): @@ -22,8 +21,6 @@ class ExtractAnimationABC(publish.Extractor): filepath = os.path.join(stagingdir, filename) context = bpy.context - scene = context.scene - view_layer = context.view_layer # Perform extraction self.log.info("Performing extraction..") From 484a77a3adcebf046bba1b2eac0c915abd631213 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 18:11:37 +0100 Subject: [PATCH 088/202] integrate ftrack api always create new session for each instance processing --- .../plugins/publish/integrate_ftrack_api.py | 56 ++++++++++++++++--- 1 file changed, 47 insertions(+), 9 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 159e60024d..1d65a53a4a 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -36,10 +36,42 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): return context = instance.context - session = context.data["ftrackSession"] + task_entity, parent_entity = self.get_instance_entities( + instance, context) + if parent_entity is None: + self.log.info(( + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." 
+ ).format(str(instance))) + return + context_session = context.data["ftrackSession"] + ftrack_api = context.data["ftrackPythonModule"] + # Create new session for uploading + # - this was added to prevent failed uploads due to connection lost + # it is possible it won't fix the issue and potentially make it worse + # in that case new session should not be created and should not be + # closed at the end. + # - also rename variable 'context_session' -> 'session' + session = ftrack_api.Session( + context_session.server_url, + context_session.api_key, + context_session.api_user, + auto_connect_event_hub=False, + ) + try: + self.integrate_to_ftrack( + session, + instance, + task_entity, + parent_entity, + component_list + ) + finally: + session.close() + + def get_instance_entities(self, instance, context): parent_entity = None - default_asset_name = None # If instance has set "ftrackEntity" or "ftrackTask" then use them from # instance. Even if they are set to None. If they are set to None it # has a reason. (like has different context) @@ -52,15 +84,21 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity = context.data.get("ftrackEntity") if task_entity: - default_asset_name = task_entity["name"] parent_entity = task_entity["parent"] - if parent_entity is None: - self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." - ).format(str(instance))) - return + return task_entity, parent_entity + + def integrate_to_ftrack( + self, + session, + instance, + task_entity, + parent_entity, + component_list + ): + default_asset_name = None + if task_entity: + default_asset_name = task_entity["name"] if not default_asset_name: default_asset_name = parent_entity["name"] From f821337a8ee552aa91d5c7d7275c3a1d82bd3c24 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 23 Nov 2022 18:56:17 +0100 Subject: [PATCH 089/202] don't create and close existing connections --- .../plugins/publish/integrate_ftrack_api.py | 52 +++++-------------- 1 file changed, 13 insertions(+), 39 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 1d65a53a4a..231bd8e81e 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -40,25 +40,15 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): instance, context) if parent_entity is None: self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." ).format(str(instance))) return - context_session = context.data["ftrackSession"] - ftrack_api = context.data["ftrackPythonModule"] - # Create new session for uploading - # - this was added to prevent failed uploads due to connection lost - # it is possible it won't fix the issue and potentially make it worse - # in that case new session should not be created and should not be - # closed at the end. 
- # - also rename variable 'context_session' -> 'session' - session = ftrack_api.Session( - context_session.server_url, - context_session.api_key, - context_session.api_user, - auto_connect_event_hub=False, - ) + session = context.data["ftrackSession"] + # Reset session and reconfigure locations + session.reset() + try: self.integrate_to_ftrack( session, @@ -67,8 +57,10 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): parent_entity, component_list ) - finally: - session.close() + + except Exception: + session.reset() + raise def get_instance_entities(self, instance, context): parent_entity = None @@ -224,13 +216,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): self.log.info("Setting task status to \"{}\"".format(status_name)) task_entity["status"] = status - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _fill_component_locations(self, session, component_list): components_by_location_name = collections.defaultdict(list) @@ -533,13 +519,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): session.delete(member) del(member) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() # Reset members in memory if "members" in component_entity.keys(): @@ -655,13 +635,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): ) else: # Commit changes. - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _create_components(self, session, asset_versions_data_by_id): for item in asset_versions_data_by_id.values(): From 3325ee03306dcdc9a5de81f26c7c6d6e6dd16a0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 24 Nov 2022 13:18:35 +0100 Subject: [PATCH 090/202] :art: change how the instances are deleted and instance look --- openpype/hosts/houdini/api/plugin.py | 31 ++++++++++++++----- .../houdini/plugins/create/create_hda.py | 1 + 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index b5f79838d1..a1c10cd18b 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -43,7 +43,7 @@ class Creator(LegacyCreator): def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.nodes = list() + self.nodes = [] def process(self): """This is the base functionality to create instances in Houdini @@ -181,6 +181,8 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): instance_node = self.create_instance_node( subset_name, "/out", node_type) + self.customize_node_look(instance_node) + instance_data["instance_node"] = instance_node.path() instance = CreatedInstance( self.family, @@ -245,15 +247,30 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): """ for instance in instances: instance_node = hou.node(instance.data.get("instance_node")) - to_delete = None - for parameter in instance_node.spareParms(): - if parameter.name() == "id" and \ - parameter.eval() == "pyblish.avalon.instance": - to_delete = parameter - instance_node.removeSpareParmTuple(to_delete) + if instance_node: + instance_node.destroy() + self._remove_instance_from_context(instance) def get_pre_create_attr_defs(self): return [ BoolDef("use_selection", 
label="Use selection") ] + + @staticmethod + def customize_node_look( + node, color=hou.Color((0.616, 0.871, 0.769)), + shape="chevron_down"): + """Set custom look for instance nodes. + + Args: + node (hou.Node): Node to set look. + color (hou.Color, Optional): Color of the node. + shape (str, Optional): Shape name of the node. + + Returns: + None + + """ + node.setUserData('nodeshape', shape) + node.setColor(color) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 590c8f97fd..4bed83c2e9 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -70,6 +70,7 @@ class CreateHDA(plugin.HoudiniCreator): hda_node = to_hda hda_node.setName(node_name) + self.customize_node_look(hda_node) return hda_node def create(self, subset_name, instance_data, pre_create_data): From d65eadb9b76f2f9bab0806adfcc83849c09328d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 24 Nov 2022 13:23:12 +0100 Subject: [PATCH 091/202] :bug: fix function call in argument --- openpype/hosts/houdini/api/plugin.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index a1c10cd18b..e15e27c83f 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -259,7 +259,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): @staticmethod def customize_node_look( - node, color=hou.Color((0.616, 0.871, 0.769)), + node, color=None, shape="chevron_down"): """Set custom look for instance nodes. @@ -272,5 +272,7 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase): None """ + if not color: + color = hou.Color((0.616, 0.871, 0.769)) node.setUserData('nodeshape', shape) node.setColor(color) From b91c3f5630f9e21db6361452a3fa0b732d3a759f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:20:14 +0100 Subject: [PATCH 092/202] celaction rename hook --- .../hooks/{pre_celaction_registers.py => pre_celaction_setup.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename openpype/hosts/celaction/hooks/{pre_celaction_registers.py => pre_celaction_setup.py} (100%) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py similarity index 100% rename from openpype/hosts/celaction/hooks/pre_celaction_registers.py rename to openpype/hosts/celaction/hooks/pre_celaction_setup.py From 0f4c4ea6cbaac1870aa1b379d86b50ace6053582 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:21:40 +0100 Subject: [PATCH 093/202] default launcher --- openpype/settings/defaults/system_settings/applications.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 03499a8567..7f375a0a20 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1273,7 +1273,7 @@ "variant_label": "Local", "use_python_2": false, "executables": { - "windows": [], + "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], "darwin": [], "linux": [] }, From 609beaa75abcdf7c058b7b14deac0f6997d18b12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Thu, 24 Nov 2022 14:38:54 +0100 
Subject: [PATCH 094/202] :bug: add all connections if file nodes are not connected using their "primary data" node, `listHistory` was ignoring them --- openpype/hosts/maya/plugins/publish/collect_look.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 157be5717b..e1adffaaaf 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -403,13 +403,13 @@ class CollectLook(pyblish.api.InstancePlugin): # history = cmds.listHistory(look_sets) history = [] for material in materials: - history.extend(cmds.listHistory(material)) + history.extend(cmds.listHistory(material, ac=True)) # handle VrayPluginNodeMtl node - see #1397 vray_plugin_nodes = cmds.ls( history, type="VRayPluginNodeMtl", long=True) for vray_node in vray_plugin_nodes: - history.extend(cmds.listHistory(vray_node)) + history.extend(cmds.listHistory(vray_node, ac=True)) # handling render attribute sets render_set_types = [ From 1b882cb7caddf5921b5e12a921f0b7c6182346b4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:47:44 +0100 Subject: [PATCH 095/202] celaction: recognize hook --- openpype/hosts/celaction/addon.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py index c6d30935a1..9158010011 100644 --- a/openpype/hosts/celaction/addon.py +++ b/openpype/hosts/celaction/addon.py @@ -11,6 +11,13 @@ class CelactionAddon(OpenPypeModule, IHostAddon): def initialize(self, module_settings): self.enabled = True + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(CELACTION_ROOT_DIR, "hooks") + ] + def add_implementation_envs(self, env, _app): # Set default values if are not already set via settings defaults = { From 1f38d061ce64729becdbd5c79abe1c3dfb30f2ca Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:48:15 +0100 Subject: [PATCH 096/202] celaciton: resolving cli to hook --- .../celaction/hooks/pre_celaction_setup.py | 31 +++++++------------ 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 84ac3d130a..cda268d669 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,8 +1,13 @@ import os import shutil +import subprocess import winreg -from openpype.lib import PreLaunchHook -from openpype.hosts.celaction import api as celaction +from openpype.lib import PreLaunchHook, get_openpype_execute_args +from openpype.hosts.celaction import api as caapi + +CELACTION_API_DIR = os.path.dirname( + os.path.abspath(caapi.__file__) +) class CelactionPrelaunchHook(PreLaunchHook): @@ -19,13 +24,6 @@ class CelactionPrelaunchHook(PreLaunchHook): if workfile_path: self.launch_context.launch_args.append(workfile_path) - project_name = self.data["project_name"] - asset_name = self.data["asset_name"] - task_name = self.data["task_name"] - - # get publish version of celaction - app = "celaction_publish" - # setting output parameters path_user_settings = "\\".join([ "Software", "CelAction", "CelAction2D", "User Settings" @@ -36,29 +34,24 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) - openpype_executable = os.getenv("OPENPYPE_EXECUTABLE") + path_to_cli = 
os.path.join(CELACTION_API_DIR, "cli.py") + subproces_args = get_openpype_execute_args("run", path_to_cli) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - openpype_executable + subprocess.list2cmdline(subproces_args) ) parameters = [ - "launch", - f"--app {app}", - f"--project {project_name}", - f"--asset {asset_name}", - f"--task {task_name}", "--currentFile \\\"\"*SCENE*\"\\\"", "--chunk 10", "--frameStart *START*", "--frameEnd *END*", "--resolutionWidth *X*", "--resolutionHeight *Y*", - # "--programDir \"'*PROGPATH*'\"" ] winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, @@ -109,9 +102,7 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - openpype_celaction_dir = os.path.dirname(os.path.dirname( - os.path.abspath(celaction.__file__) - )) + openpype_celaction_dir = os.path.dirname(CELACTION_API_DIR) template_path = os.path.join( openpype_celaction_dir, "resources", From 54bb955d8b914085582ca6061a84a6c003743982 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 14:57:44 +0100 Subject: [PATCH 097/202] celaction: improving setup hook --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index cda268d669..76a2e43452 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -42,10 +42,10 @@ class CelactionPrelaunchHook(PreLaunchHook): "SubmitAppTitle", 0, winreg.REG_SZ, - subprocess.list2cmdline(subproces_args) + subproces_args.pop(0) ) - parameters = [ + parameters = subproces_args + [ "--currentFile \\\"\"*SCENE*\"\\\"", "--chunk 10", "--frameStart *START*", From 00bbd4ebe1afbcdcd15668af7611947aac4ecc5c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:09:34 +0100 Subject: [PATCH 098/202] celation executable fix --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 76a2e43452..aa04c8c088 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -36,13 +36,14 @@ class CelactionPrelaunchHook(PreLaunchHook): path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) + openpype_executables = subproces_args.pop(0) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - subproces_args.pop(0) + openpype_executables ) parameters = subproces_args + [ From a3c55730c164d5b03444c7dcad46d6086ebf0c16 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:20:44 +0100 Subject: [PATCH 099/202] celaction removing unused code --- openpype/hosts/celaction/api/cli.py | 30 +++++-------------- .../celaction/hooks/pre_celaction_setup.py | 1 + 2 files changed, 9 insertions(+), 22 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 88fc11cafb..7b2542ed06 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -1,6 +1,5 @@ import os import sys -import copy import argparse import pyblish.api @@ -13,10 +12,9 @@ from openpype.tools.utils import host_tools from 
openpype.pipeline import install_openpype_plugins -log = Logger.get_logger("Celaction_cli_publisher") - -publish_host = "celaction" +log = Logger.get_logger("celaction") +PUBLISH_HOST = "celaction" HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") @@ -46,28 +44,16 @@ def cli(): celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ -def _prepare_publish_environments(): - """Prepares environments based on request data.""" - env = copy.deepcopy(os.environ) +# def _prepare_publish_environments(): +# """Prepares environments based on request data.""" +# env = copy.deepcopy(os.environ) - project_name = os.getenv("AVALON_PROJECT") - asset_name = os.getenv("AVALON_ASSET") - - env["AVALON_PROJECT"] = project_name - env["AVALON_ASSET"] = asset_name - env["AVALON_TASK"] = os.getenv("AVALON_TASK") - env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR") - env["AVALON_APP"] = f"hosts.{publish_host}" - env["AVALON_APP_NAME"] = "celaction/local" - - env["PYBLISH_HOSTS"] = publish_host - - os.environ.update(env) +# os.environ.update(env) def main(): # prepare all environments - _prepare_publish_environments() + # _prepare_publish_environments() # Registers pype's Global pyblish plugins install_openpype_plugins() @@ -76,7 +62,7 @@ def main(): log.info(f"Registering path: {PUBLISH_PATH}") pyblish.api.register_plugin_path(PUBLISH_PATH) - pyblish.api.register_host(publish_host) + pyblish.api.register_host(PUBLISH_HOST) return host_tools.show_publish() diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index aa04c8c088..5662f7f38f 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -54,6 +54,7 @@ class CelactionPrelaunchHook(PreLaunchHook): "--resolutionWidth *X*", "--resolutionHeight *Y*", ] + winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, " ".join(parameters) From 3aa578047997641b304393ec613701e83c8876fe Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:31:59 +0100 Subject: [PATCH 100/202] celaction: clearing old code --- openpype/hosts/celaction/api/cli.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 7b2542ed06..e00a50cbec 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -44,17 +44,7 @@ def cli(): celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ -# def _prepare_publish_environments(): -# """Prepares environments based on request data.""" -# env = copy.deepcopy(os.environ) - -# os.environ.update(env) - - def main(): - # prepare all environments - # _prepare_publish_environments() - # Registers pype's Global pyblish plugins install_openpype_plugins() From 2f80dcc950b88581bf0b7e2c4daa37fef7f80802 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:32:36 +0100 Subject: [PATCH 101/202] celaction: fixing scene env and adding chunk --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 5662f7f38f..b14fb12797 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -47,8 +47,8 
@@ class CelactionPrelaunchHook(PreLaunchHook): ) parameters = subproces_args + [ - "--currentFile \\\"\"*SCENE*\"\\\"", - "--chunk 10", + "--currentFile *SCENE*", + "--chunk *CHUNK*", "--frameStart *START*", "--frameEnd *END*", "--resolutionWidth *X*", From bdda909f1f319c76562d0f62881ca88b09780912 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:41:26 +0100 Subject: [PATCH 102/202] celaction icon --- openpype/resources/app_icons/celaction.png | Bin 0 -> 4012 bytes .../resources/app_icons/celaction_local.png | Bin 40783 -> 0 bytes .../resources/app_icons/celaction_remotel.png | Bin 36400 -> 0 bytes 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 openpype/resources/app_icons/celaction.png delete mode 100644 openpype/resources/app_icons/celaction_local.png delete mode 100644 openpype/resources/app_icons/celaction_remotel.png diff --git a/openpype/resources/app_icons/celaction.png b/openpype/resources/app_icons/celaction.png new file mode 100644 index 0000000000000000000000000000000000000000..86ac092365d009052c4e0351732ad1dd067329de GIT binary patch literal 4012 zcmc(ic|26@`^WELWY5?hQj8@eWr-Gr8Cyu|$r3_CvW#sQV@t-M>_SK(WGgL}Qg$=4 zmc}mq$dW8QWo#4EFwFPVZ~5>0@9+D^dEMuJU!VIr*SXH?T=#V+{(ZqzkWY#a03c{? zW^4rj47!B@o;}dugn!}z4ZHzn4nY9$i~hb~AU9tUg4_giOB3#|+>!_doj6Y2jX%i5 zo?sA6@WSCiMdb%G2ni4%Wb7W~hQ|gFusDAp$}6V;alYT&2p15J$9j4Zz;mGwry;uc z55>E?{+7xTuzv2~I75gRvhe=4SmQhhp{{s$;2gmIE$9E^92?~3>k3KnppH_T24SAx z*v=L2kM;Kil>H0w5aa&u^}%>Icd)^Z{qestBb=X~yFUSJez-@5*q+}q8!W-s9Xwec zQidW5H8(b}3CpKVTKXPfgm zWlL6rgkbP9oY;UH>}a%8?2!l&c!mQ9mk5PBbG1#y=Af9btUxhGyP~7T?6R&-x{ViY zsduSw>BACz=X0}BpYB~IOB=_IzhKKnvb;LZ1J)f;Ic(37G9$<{yRlS8?76x6ZlPo))XeR|A5ESt^l}N=n1Ps}$B(WJm#YpPA zwltjCPDmj#=NTew$?ooRiM6jvMVI_nYUTPS1t%^y&6~TG9qN_CCHf+{t=LaRTdhY$ z{~sH~FR9MZfwl3K;uo@e)eH;^p=?rhdB_N6bu&udFSL z1%4H9=@RtWeSsA3@+OkCeobW%oxDw^RLbvYk@lT<&fKqR(lD`dg_zKVjM;_q*F6rI z9s{lSdOADGYjawG{&K$C+qg5~p8m~|RqWTw?5%D8UuQq^%<;iAV%5gh*=O@3JfCux zGlSwIfL52@rs^@fQ}=JseX1|g7ljzWqlaQ3V{SG1@pajjOTMv4q%vbmJmyP;e< z$gjLB>|m1<1tIfMtu#7S>ZTHHX1aJ~pMx=&8>n8uMTRL5j^KAKqr$xN9GT*?jeBAY{n_kI48e7?y9&*5Cs002%UK!p*#VXr zCoCPNjT(A;o|4s|tjs=Cz(AS{tT?HiV)nCXS}i#X@d!}ScwlY~Gy551xj~UyGCT8R zm^Gv1a<0;H?E#*yMHY%`(<6y#yybT=0O$8n(~ii2@*Y~jM=#=aUYLfWM6B*-^~ck! 
zw#Z!M4gy$fcqR2uPZ8cp+sF13{IFULoU$uneCgG$3e6$<^>XA0&6$(5&9>DG9nlOR z(!tWdE}a8-10!hE(gj9#)K1cZ>5+Q48KqM;ZIu~0dRLh$l=SYy3V-AAo%)faR!@5q z(2=5|-y^C9yJk#R!10jo9~@r>O_0(dnZ^)e$Z1E(HV(-KoxiQ5(=xT$Emfz7PI(ZK<;g&DaDKI~FgZw%VVSgi%@!lye#W-0Y;v-QnE~f;f&U}8 zay^~v0>WBH^eqrlw^x@2@xm z>zg8-vO=XKEs~GbpLJaMFej-I)_k~fQ}$Y1=?t#;|i zzGPN_{y6XOmj~}3V>TUn)!$-KR;C~!A!639?C+(B_Noxot!xqNo9W!2IVBB7Uf*3)nP^ohzvh8sc)KAvlRqW`Tf^2d)s4wuDnG+n`thh)+Cre5ypgzLl= zqAX+U#W1-x@_-qwcP}-~Ne)yFzV5t1MtB{-+@hG9#R^H!4KClq60ZyQMjHu8tvMgQ zsw;kInlKw`oEybA_sxWi8HQf*`hM6zcM(F8mzs89Mx-b$xi@-d*M9muT|xO2r=?{)wgp@c+NV4% zi}7X6#MxMIIjHR|lwS(9d5XEI8t+*`-Cu8g*z8ApUgkZWk2Q7@x;nqsCMsmMSC6DG z(C_%}?%Ey+CV>RYolU{;`t^@J7}Nz(ZPa~aU2|gELyuJ_>4@OA%+cFA6_bb0&C}Uw zWimhN;Ry!eQ^_MNZcl?!rC)QGFI_=SWexFoinpgskQ`2LeveVkTS;X$eOi--qXoRD zRa026FXge9X~mG|v@%%QRpT?8ql%{Ya@pr-e<@c5&fqlnl$_|Xr56&;Mlfc73op;= z&@R$Hndb*TS6ju|9pE9U)Qq#RGW*+wORQa9!!mU@UlH9tA$Va3sLX9$6!6zyG4_rr z%wEI~lCa38`Oxm8f@27vdFmAZ_3iqTaH;Oa^?!2I&-%y2tKvFCLLr61@fj4Gn)+Fr zZEouR^`G9r-uB~g^!e|Ymh^4Ys;lgQlXv>(r|e4D>H1FL7T@b|9aa|fa8v!H0g{m2 z)ZG`z)LqG-^wPYj#cpiEl&A|iV%G%JJ)?ZiWR zV&jO-A?a&>GH<%oEC=L=Hp=3j_&;{$o+`Q_Ulo&D+uO6B!$qL+jlG26a`Jcc@z-c$ z?N2#&lS#Wq{~V%Ot#?(dC%B6&Ct{#(Zn$p_?W2f-{?;TYg86U@XLe=Jw36(qs|!bQs)Z=Yf3Y4sDfDM zGhWsH__g;>iP>7{h8h)##W!cYT?9i)rAMY&b|0b5wj+p~QuRsFt6H5_)U~fIjEAJz z&@vo%D)c1n^o{1FX2wFXWZgt%@}gc!Z6R7{zSfb(8{HCOkss>0Zs<1h__9E^B}KD- za`=i;9wzo-!9vumox(|SLc-!@_jf)&W!n8x9j{(f^A;rgwxQvau$NMvLQy|SZG zVx6Iz7A-3*3@+(Zlsg+fK^JZ~aZ0p|cWjRipjO&Cwk>1m`v3D;htG|R-@@MzTX)6I z?!-MbcoOi+?<@km@>>U#^9n#d=)ZJ#MysP}?R|X3%O~gJ3Ki@5!Ne#QOsU{{8>VLVfYI+sLKj%-l10-WB)&1Ppb7D`0j_IxU zj-eWF1}fip5v`vYGO+(pL7k3&c;Y3!4Epk1ny#Qc=pwB38XS}42)f#@vhDst=Uvy5gKiYDm`KL0@{yyLun_qMkS+(Qg2E_y&#XGj|H6_$CsE>P|Mf( z*1+QeTiqu+q`FhSXiXSs2BxuIem-Q=;XY#`Cyp7**{ZbrR$)7D*4P%kTROt1Xmby< zajQChSRB)6;PdwjNonXz)us2JE*bwU8xCVl|J0U@m;O-jS?!)_? 
zbH4lIo;z!?m>IhJsjjNtRkf>$R#B41LMKHBfk0TYG7@Sa5IE`i2NfCk2HyE@4EROj zE~)MQ-pSJ4%f!_JBx>$tY5|pXFtM^wvoJCDaT&G{1c4A3Y}B>gwG|ck&72%qO`gZF zdOJ7+vq2z15pQP`Gdl}+sHugOjiV6Facd_H)W%$hMvGgKUC~+G!rDg0*VW>^uadf% zubmm6IgN-gRM49rSir%;-303GVDIS0?=3|0pLO|x_s?Im(Lnz*#NAGa=Klz#t*8PO zcXG9Wa^;AyVr zmi%fGQvdZV;FA!IwY$4BKO38umlvzoTUIAmD>e>3K0Y>fPBufrD{qut!qEdH;&|DOxHsrxuvu&G(NIeEC60fw`reSVZPzqqS~iMx}l zx|5Ure``_2+R5F?&DzNsDlY!4H9jb#mW`vilb74;|IATTf6nOjLqbijSR#`@iQ(IGK4kSU9@>_gwS; zJD1~so%`$#4$i=nB`jQRJT1(nT%8=C|JgRb&HpM3yX616-ha(C|6gTcm-=7lvH`-d zJsbD`W$gd{3h9gu_=<$cSz{|q?b~EWGd0ZpUOGe zBBub>BaVnr0~q7>Vvk(HYLt* zM0E?EbRh&T1U)?*b=)1fT`l=lCw@b8kB|Fnu|U24PG@^Gjwv1xke`q}$r-URS}%ug zJ?G@ODcA7v?8BYQF#U&X*p)ZIk(meP+Uw=(0iFc|uf96K2i}{D}t(+GH8F(9+#+t**^s8E76%Ue!o=8$(rQid3M}-+|eqq8ip^20OD9(x)}t) z9NSI@rnTE!4&J4XTHM11E@)^o{@StwmX|{y4LEadi4$?}k9)1GjbEJO8q@l3T0TIq zH|*|OVBp)N@V@Y6ZDRBC{T9!XEn3uAmD6aG=0-of5)U}ig>=qs3Y3N{>!u_qXG?J; zZx?KebGMX)F4V8ewAx1wJD(mIyuCuq1Rt;0kF+{((_0^J_ZL0p47O*hjjE~q)CcA) z^l4q!9rd^Y1qkq1AJAY|C!acMxoh}9n3Y5x+(l3WvsV^<5=Ge`|JYSuAgG}2_>vf9 zxTFsyeX!>yffFAJc6GC!h`5Sw5eRe`J?v@Sft~+dTzp(TsQcQ+ZnzmO+)ACwi_(DAQ`F9_n6W6 zf;*JNWbUtIXyi~n4(hYM2cc*<==@^azG(9~U-)g6cR0bn&)jG}jE5iN85fc0vYD>c z!c^7NV-R?=dDOi9>1X3{ipy6G+;R-$}7xaV;pkI|Zj&oJ$4ACJz1=rZ}Ruga4(@0M%^_KsAhi7 z^{^uyIJL*aAooSZYhP7w9Psc$6!PZNs%XiY)X3wSFpw4&Mz|+hRvtP zZr-m>!`A}Il9Rvp0M*%2IV*gK4Dvih?t1FLIZP+kP@fjkq#D-GID$-n>7L}*tac7%jv{yQbl!{~^A@A^^;z|Z2058576mvvdn=+I3zNgz`NuyBS3k$Ho;8#y82{!NxZ~K_ z4jdq{YAXKZ`{o`WNDt2T-lXE?itA*DB!Ckh%%=)(<+?)4nldjy_k$vjr)mY_w1uZi zC;Uics{ykhJ8I36D)aanP5X>72nQ)L_O#0|>|?$2s+iX^nsNAD+W+)EBo=t!$Y3)% z>Ug}UzB}#1S3a2~B00wYeZi7S2^__@&g8GLjgnn3tCt@y8(Y2F)+vrQQkCDCm=4yM z(d~G3-sRr?nd{i+*vmHl;L$M;aQw zKm-NtCI{fUl-M*z030uNb-{z=2l_4JiQ#cpC)X)VmsD3a&2SV)CR*CUTeRQd0vI^W zn;Cll4%tTUj%$B&fJl_PYE>vZXxDk=$~v#wk1y7N5W(Z{E5T&Y`h@sR>rv-Z;8j0c z;E#Y^d84y08{v+rrphx+SSPn8LIf4@>Tbnj+Ej}1*O$guF|CBl;_KbU{2NjH1E@ofn z+s-MZx(m5~NL1i&$f!t@wF@>&iL(P^%T)!Pgi?`%DW#;O3dP}Q>E$9P!Pqsd25k)0 z7V&=$h~e%u-sCjcSHrvxPY+jKc@GCxi$RauL4W#)NgAx$zuYc#KHYV;K3yk1Ij5nT zI@C$6H`tnkra8v^EiozYR3!-_B+ofGIZ6F)*F_%9u0BrsOSO6)y$l^Hcd*2S@q-0r zethY3Pxz$3^}n zUlkxk3$x_;O&4W@Wm>zM|?hC2f8?EVhE!_o0@=yXR+-#SmI zXP(25*Z#Xm-9a|1r9!6^gC|XoE#alKA1IMTn4M`ocIhNoNwnG`uoPHYN#m<#fFPDT{tl2uV4vh>EXy0k!AQ*yT*^k{V~8MVt9 z8(;M5Z}g`GzM$9GRyh0Dqn-EiBBVe3pRJETiH>wZaKiWIpw!KT9?nhIRZ z?fxT;JU^!Skq6_mmU+o;74b1|=9?DKpwD)$6~3AIuV4b?pQ*tY8_TK3^m$MJibXCm zh8De+$44VA+Qc3oKabaeWrS9`2fCV{Q-r%6X_38osCF!~-T+_#;#M>0Xo`;ZCglb3 zOxH%sewAC^OGP0PY=au2o>?#3*7wpk&5w9L-dSVZ9SncwY}3pO$rEZ74d;=7P>9bCPXU z6$+(y7mIFvRB^TiIn%L-z1lQ$lH7`DqK>{+>hk6H#u=n%AI~)j4%3VhRu=wBuXeF~ zmeF|PUdJ>$#lK+r8&pKTPC*hXjm=#td>XAhUqnpe!I$|+>}g|&GD@6%-; z)U{~53M?~ZEj`>TBv^cSAo92_($eg<{d^)0H#AZ6>wv7XwezmE^QTzclu+#$JBY*b zJZ|Oug7Hpa#eP!gife+Mlhg{PHY_G9obgSAwOd+MqUaH=I)wG^) zo{N>pKjMVV$C@F83^f>IuK)UPJpV*PSHl2^D89_ezFw7`dm7cQL6=rRTR;Ugsb>{; zb=maWWsJ^hv=^zqluoZFOB5)Cztqa?f#Y2N-OhC`1?-4ZxxTd=TiHaaW#$C!(tPbw z_8^(uHKP1g10W-(EpL<|8dr*AWh`sUPyhhOzZzL%EmFWTFWCnI&zvd#gCE|Y=b>*< zXg0fw44q2}@Vn7(Qblg?1AGjavjH5>MLc> zV<6L8nEoQaSRM4R|6uyt!JbKFz5TznyXepMs8F?3ab92xO7ivj zajeBh@DStD2Ghps(`imTKh<>?O5AyTT%0a4O;d^Om#cRvv*p#xHxphF^cjyv%s(+< zQmb1XN|YHua2GzR8)bESy2*R8A$J1s@tqwVmNgfIA@6?U)uLC&Ifa9LBVC3AGSs21 z0AsV;sn5jg)gltGc$^fzpe`YHu*{>+pukcATPM{hFGAA0A717PMacmwlwHquJ%r+Q zXzlwoRA2{;gJAQoLhNHLa={7*bGJrD8Txoh?dJBN1_mhm{)1viUR zQ#ei(^v8>KplMQ2b2eJP!$tFCKvltVO}rYA2u4s8CY#ZDefgHm?%zqCbn?*|--P8v z<@PzMbO2@1=|4)JCyQLxY=Nb4dY)A#_~2T9>>^-B!}pQT+tov!!7Sr3TMV1rHTMS6 zD{l(VOPkI{ZE8 zudmt+abzK~*EugD;4%tEQ5K)UGIF2AHt7~LGwSRBW7~k#63NO{8)w5`8tzO0m64%R 
z%=4favVWvLyaB5{;enBstRLQ1adUF+Swm$zwAh@1gPas_pt9_>9N^>Q%S?s^D_ck# zK3n1O)ps4H|4+KE^gJA$N#cn}cvgORe(1AgZ2cJ8zh-SIQn-44tOc_-P);6#`<^}I zy(k@4Cf848gKk8g+1JH6JwK6ZI=5G@4Rs?I2FpMm zbun2y>^GN<#F-*I9z=CFKd8_EpxvIXJe|LC-iJFSQgqSH$;X39sUbOpq}Aa*=JSiH z5+pNoKvVH6s#p@!$n#7-;S&PKg3bPL5Js|(PPq|MVr2ru_U-FvOv1)b-*w=IXQ*Sd655Rzd zmvm9=6E)7ZU#jsENZnzhq#bUjmu=dLW?nQ{lSvyv@_* z+LTqZ!=78jr&`qrhr4GKT2yEl$=R{Ws&zMfx}Cej9hu5~0W-(LPK%8Fs_@bDIeFq@ zh0~(ZY(5$uom0c;z=aELAoDZx&esp0wKI;~iH5%1OCS2JQ9ZA=4wkV6%QsAFK~m^) zefLU_0H~p)miHnBCCdt69%S;leMNaDZVOK>Hj(b_NjI6aDMVUM4l z=0649?iP~`J|B0ol@er{%7endwdhw7nnTOS1Ry-x&70PvmU6$J*-Pg7R)BOz@FMc4 zG=z_qEVv6#E|+^ZW+vfr;YepZ_bA?NKse=ee2!O*voiEuC5SW^AtOwNr!nji_dJDx zxrNOD&TNBxya_rNpSb`kR3Hp;k}4PZ3A}M&#^fQ?d{dvf0Mm9%EtozIdh)vhBH3~~ zz*)5kstBLYTN~_KJJH=UJ^J;MEwR&Fnq+4l5CfPq;LV3D_Q|%TCAnr{YCJss9g!{f z(uP#;l-Q5)h*!DpDFWwXZ`;)Jv^u6kWf<)SvEV}+2dvmsms*}j z%>yRDDgxX|zi#zex)`PFA&I-awPO@KL_;&2TP8jLp8{w%s(t{1*1Zw;cH$%hweDR; z^CC8slVe_k&8Xfw`EF0EbI;e;H?AB26;Je)5+q6+=7f5QsmJHs|33h9bEbXi*^YDu ze)K!EGuqGYB?A^cRDh`|D-==27B1Yi9)8zr5?Vtrz=Cp8OAqzxoNtYv0~|Q;(v&&$ zmLY|f*7G2Oe%n>Id;4lKhqlf_P8aJ+=3M)5rHQ41#Xb_M6|~7Vl(jc7C*r8T0Layv z5otXEBFn$LHmyxffAhZ_0zgWVr2btd>kC~MjzVJ4RX{EC5kJNT_haW=+W?DBU0>56 z5NV2$;_LmybdeZ>PEdkYq#0vdv0zJ;|3?;eqtU;*<6lSmVLjVmcw1{ZNP_j=;>>vu zAf=7}Gek4W&7)s51)e&4hPn$76Q1YWliRzcwF*b17=2FB=2jce5J3LCu?>COD~-Z@ z+z#bSrf?$hj|-klTZjhSU6`}={0u6OP-tGs zj?8QpB^v-t;P>+d03?Uv<8;E>MHv^X#El|_#P@bzruYkUacwLCA-zS60;QUKZPuBX^7SJf!x2y4jZ$zlnL_xv@z*Zu_H3WRcVLs_Vfv65+j zN*w#5{iis|;v3rrjD1K@gu{V{p;Ap4L$2`J9Y#P*fluV`;!t8N@u1YJl;FyfR;j|q zq54BBrZhy_LZyv!TX?~-R*X|ogIIol?90~j>JD9te~kh!U>jnxyuyA!W5R?YL-nUN z3NrG-cZa`%&QtnjUwRHyJE?_jjc0nd~MZ*_;+J*<3a$)rNdPiv^pe&z?ak zXXhb8Dp$0OpuXi_L$YeIHch21qef}dXUNbqseZOOIw_?&YW3-e3u8m=QD8Ie{leqj z@v@pTqtz|eix(}5+3$SvwcC9g^nezKUW0AA-)aBL2ijvWIUF55Why&&5TNmA z4grGRaqm~D5quNWMq%-aDV(q9^d9)MgxfO`WR z*1~$C$kC_3TbnDuU2o?=XksYv(ug5yK-zJH;U&N0!&h00*$1DuZ$gkT1T#zana1MS zKgiv2y|sS0y*T>hcjN+am`)4-c7HwynYs}er)&U~zT?)7Ts9h0u&mH*v4#@29=1%O}3zBwq$#(TB$j7QIaO0W$_S51ItlVZ6bd@F$1Y&OgAkv(KJK#DwdQUCxrk~@d(IfobBeSSdhkgv3M~}DNxBwNo|F! zRVdgYTv53vOP7a#A3zGu!C>`7;oB0Fv`DJJc?uxu6#j+@?KVXVH}Ed}sUKA6R{xe< z{P@S#c=m%($HlDP18vpFGcV~lk;At{`kLoT;IIB~j+Q&(;f*}!9ez~lhRNcK@AX-_ z5U6Xd--!%nK{6J8-pT{q0f z2=1Xage9EQr2VSa2^56r@Gx{KzK+^EAQ|{)M)z~ZN6E-dBg-Sz`1;2A9GWL<89VE` zo5CK(88MbIS-nSqv4QxsjP{DlVuJn^fyvX@+eztRH2{PA z6!jH72}G_9UC5k*fT1HWdS7Egf$T?+rc4E}ntJmHmvsB5UiH1yR(f(DXF^RKW z(qf8jMo6r3R>6o&h;mthGyb7)oD9VEd9%a|n=aI<|8!t{Hq(VC@Q^oe1VjxgUUtau z%UpKMPgu{@`C&UR15GqxRT|mOyT#)+y_5}PP+wTp5ASa>WfxM_hzvHH*dV+oWX`*X zvw3y)5<;RO)2HRN{y?s`E^>%9L6_h1d_bb>b7*Q_gk8ZkR&56sZ09{9RTn9@#G6av zP+T?>Xwj6rEH(XO@IJbOwiCf`Q4ry_#3I|f-L%$KS%(sxV6DlkR!KdKNpOGg&A=D! zgeV=8*C_#mLOtBS(dJYSuWcsEmmv9-9g(>MTIn4|`#k08U>nbbH~oKgYk4Yi2-V7q zl&?S5hqd*+?X%o$Ra zJkzYz;Sq#E<%>gHGv4W_Np3>shG5O2VN^rm*9^TT`XjXsGDkfk`CA*7}R< z&fywckKyDE8b}T!=5T~qMb0Q&=fWZo{)HVs=XI_p2nbG{G`oWE&sXTQE9P7essw=s5CWN%UILQ6gYYIt6#rLHR9G$x!>31X$NuSM?Py z=3<-+?Gtp{(`F0yz?y_r^S;ONvq3-a7UV#|)v{7(H+t`vHA*50DYIt!Mi-qHvc_J! 
z4cTp_^`Tc0@^$HRCJKHA(XeF>vTt^zJ8Eu~8`v?HZj_$k9=Tu;Ci52Wi(&fH-6IgkFo@F{Y5;H2D?D?@!WOu zTJ-qPnT*PgwV;K7)Da0?{Zmtx^&9oWFMeY>q8A})8{kio3X&~vkDzr~Q+s^M%sdmV zzx$War>1U1iE+-3MWwZcJxP!a?TQmrJZzAa*r;zBUFtR&LP$^x#ly|ZL9R9O5hJOm|H zI4+NV8@?Yli5sXri;8lD?T@rfjk@ZGl;YYM8j+xv82XIbYD#e0w2wT~4KClDgOah}(IJsvPqpgOR*<%0J-V9_?8i2{ zQK$}r!*5;~C|``=9sWtf%@WpxnLh+IS()ki^H$-4!nZDbq%PQMX1ipM8U*y3NP0Gh^?1; z7&2pWNf05<;W8j`naLG;)HdnRZ4zZ1d92Qxf9Q5cU9Ra1c!h%ziq!L2T@1(g^>glT z*M^E4R(rI*`pvpq-!F?U#uKOQuH5LM%$(0oR9RnREFgsxK;FOUx}i6)UAA7DmS*9Q_q-tZ`rPbwC&CRF8O1M~$VyUb|VoKAYIfr51vGP3< zDF!KGqzfn{$wu8<>knb&%8)5`!&fPY3BQ*Kd+oFfuDSKm9G(|kq z1A85k*zAQ0LZrPy7A>8N=-T3*Ly@6TCn<{ihS9n3_|l`Ml3zf zb$OMaFKEJmXUTQR=DI9rv%BHjRpiZv>Xk|OKB7-ofRyoinFXac)#LVj1yfhtvRKB6 zu7SxIeFo;a%pZ~bv38rj+%M;2LTgLq*@W=5#|*c~x%Cmb5L7p;D5wZ8t7c})aTYjf zDwb;xgm?ACi-2GI=_ph-=v6AXj#J&c;zw=Bjbfud3n4a$}ufn zt*OdqkT^UFkDnZ+J8sJCL|Av{W;^eg@OzQ*nwQHDs9hH}pX*KLAl$-D6GD(J~eBu~~PULeNJnAN3yN6Mrs z^;|qU#Os>kI@qT#Bv`9n0k{_LKvxsQ7thJCG!iS7$wTMF|iFu^Xn0Nvj#6}g86uR$2V6#Fof=4 zz0V}Unu?>x>eJz$3B7RBPC&*GYVsO;Tx9F)vTzz1oIvaH)wTW*YE4L|ad^WU4k+Io z=w%NI(2uHZp=BUccc!IhPo8Z|3x4cNvV{hTRuH>`h&> zQ-K>3ZPFUq+&n*Jv!{?~j9o z&ea`%y0mUk2#)X#*4;}NB12t8xdwQ#w^}z+|1%dtO9hz*Z1g^?V|-|xQ3$BED-eou%B1GBsrMQThq^68=vCc~&C|0D{wnQ7PP z9f;^90)Z>OJ$*|+9jLr+0Mgk}t9nJ5GJ7$D*}jV9bdr%bw0xel>)MuH zXc0`l`h*`ZpUwOQYbQ)d81w?mGdxTRg+IkXrOLys5H?*#`q&Vr*i4plZ?LG5bI2j1PdCa3-3{fAZ_GBj9{@kcG;=+0woE1^&*AT2*S6Q#> z+pWuHRHo_Txc19ut;$Z*m$1{27UZ8VqP1^TIr5vMh2*#!3zEU5i9g7Wx|t9nGvhD# zA&zq^lES3GtH&o;8f?EF!VXI7Bfn;J&8GPTCa$l2eqkm(a}BM~RK<=8zX0#kcachB z?K5ZW&Lt4R$#Qrz>K+~+W|nV!h9UF^L~r5@;BsAt6Uo8f`ZzX~sR-f-Giz!{IfqlHP0`fGjgI>j z&VlpP@Z;miy5tQ+Q2G3KnqVo5Z$|se_Hth#l4kHa;=*O$e%Jm-PS&U-G*~jAXM-m? z-IQE@N;o+6&!~d8ix^2QoMh#6fT!b!9wo?9Xj}6rFk&Axn#M~C>$VC!jWxP9hg{=) zC>bG7Ta)^ZQ9{1K)69A(E760{ma_FoM<#oEWB$kEi{Bq>Du#s}4mdeq7HG|_D@47f zMK+s%cYvLqh^Bi`vx%BN-Scv!U0>uk6JqBNL~)F?@UbJ;`T;o%LKFBI%beJ^zH2yZtw7N?Z`pA<1lLsB)(U2ENX`n%AsoT5 z>=BYcYe+BkS6Gl1LJpyUAMU=qu4P1OX{C@Sz9KTj7S?iFfXF8=_h*T(!WaudW5H}Oj5Ld`;Jyn>4*mlAe8+ihK#(o<(&5bCC3aUBd$B3pFMR`LL2KAxXdGEM z6^$$_L_Pu+8X_+(??Gpfo~!*w*yq^gX*Gs+U2aZDS7Zgu7(s}X#iE#6|L>y!!Fy8+ z?aGrEqW+7qQkP^ztoL7P8@bL$tX!JK%o}l2#W{NLx??u(Zm+oJ)T?r2TaO$-(8vUI z_)s`=T4dpm5K<(ZnFH;lrfM-z_m~8Rj3qm9k~>@YSaqwdiwGFH*C8SE`NpPs*EaU$ z6^H|cNU%r72f+*V*d&+^T=GMKnCpGWEa?@jiyCvvJldz8$$_8b8|=j7!abXwY=U0L z3st+`O_5frE5$|<5|XiLZK3$hl_c#1**I<#-y|^{O}Qr#goa)Lk;!s{Q*n=HW}*uw zWWU%99j^wxr}y4NsvPu zLevpXwHB&}8A^wWJdF*+=-UzU3l`-!l#v=(Z&-{Eu|kpJxbubGuA}0ZFO^orrt+O= zbxA*JF+ZLNx=_5f#x0k%oU6ZGO0#pw5?r!Q|7U}AZ8HO^?UEnBXMI8Bj|#Otqb1k%F_KC$J9omro|Xdc0wI@%{HEak*fq#c=M2|f8w%JeET zF-YtpjyN#!{nFDR7n#=0M)0gHRz}KP0ag``=Y2+t=8sIYh&`!(+w;Fl9kv^9XBNDN z$Ac^X$PfoA`8Abjlt_}V8n7_%>$(w6AXTX_<5b9l5W^F+Q433nn?cfCauXZmG-N)Ao; z=EL!~d2n(~x=j|AX>Lzz!@I%;Us@BHChwk4G^naaLZ=hH+uwfspju#T2V-zCuZL`Q zO~q%~a6iCv)g>(YDreXn4=Nbep?&o zL++`7+YBl6RyB`#$U%Uf%(wO6T7v=`c9ELGe zwXa?*fgY`|7e3S~WQsnnsG*N8e`Eb!{;L8r#HuEt!Gt(l6?d93kx`8tKd=Y=P2;fx z*Wx{+i8rb#Nsw79s83)NXa`lq8ZkxLDhADvbGZGs(I3q@vPAbo5Bn{6!Ky$+oki~U zTlS+}!3Es;m9r9*WB1a$_D#(%yNJj?q>wke$aI-XZQ+_jHf8G!4R5;;Ixtzb%twt2 z-Tg^JYO8{_>VC5ZRwjv8bl2G9#b|%|-h6Q;qRBM#@gstFG@jrX!kywZ<{y#4*?%4^ z7My=-$MR#&Ls~YpX^CXMfj;X!fw6HivrDQ_AO$8i$ER}aL>w3pM|NjgSgFttm!~9j zovH%$S2#Dv_9fFT(yKS?cC)6w@nQd3h{XGUz>(yFJW7LQMxtx zyV(&_hyf%8mQmm3^g%4fg7}Xbo^OAupfi0=W@i3m=YxD-nrj>$7FQjOqx}-~v}WB{ zO{G!(T#|&^8LcbALfRJhyu{%07q@nmZ=I?ybuKef_3m~q$g$w!kF`QUifiGfmG*H{ z9SB!$-Yhz4caRcS5Ra>ghEmN=L_wfS!mp$x&WR5`B>R5RnS+!~2V)$sLzzM24#(Js zorAyhx}n#$V%_G4tD-eUUkc;!j}p6*3>>O*I}8JG=3XC7k9OjBWXt*yQ?*L0aV7ar 
z;)p$b!eGUaU6bkr+m%5dlOHz5l;B<`TF`)-e-%Yv&f5U7Oc$Jh?VCZ_k;V)zpK&I-|N7 zZDxmoS;-Ue2AT3*9?vttGCE&jc7gzH79_TIoupep&QUtdQ!!2a)-0X<8J6` zN}vUg1xWtvxm9B-%3&&`?Cx>u5+P58(JyXa3m@b9b>sD<4?teL_hlk%5Z@{2TIocF zgZyF9Tw|w{d3Gow)<%Aczpdimeh}o$EAZgVLX1kTFLdFZcv&OvV;%01lcqN$h{sV_}zuue76~qcUEqPPA*kH2gjjg31#m zIU&WC1uAtSHAjw*4z0OCKIB*s;?E-WJZplJJ<8Xsw3iCB5|LV}+1OSgPxfYMF{0{n zaUdq?Zj$bKEGJV209%DN5R4Pd3~xT@K0DKMh$9QZVKEq?h|6lUX^0Go0|TWv+eVrBPo%cwN6wt=_{k z*<6Rs7rMFR$fnQ=HvevmTiKDkzy;woHxEx86Z>+HWRq#lls=-{_?|HWg6sfV4P_Yp z%8M9di)!+am8;6TBgjF;ECULQ_c!Tf^hU7=#wDV&w^)Z7pfZn`+v!zM#}EIC5wgL_ z6;ftYY#V@p!)oH12Z|s+_WX=uJIiM{_az`xr%&Y;QGcV5jsG0hF5Q~OkB~J8*Ce@+ z*xZMnn;H=rCVOfiD!=J2TE@rrx%<0pxTbZDPT7lY66pL5cizb>CQxO@*Yhu^h@uJw zF1}8&r)QbJNgGyDI63=N<8^6m$6VI$N||@Ju)XknL%zAZs-%XOG=c}{n7OvBzW;SKt$ZgD1u0XX66Pr59t+ANM!V^n~bA^?U#Y1 zilR}U6{SK2ULFDm5C-BQ2rERg_H9P7@JtsZD3)N$-+ zV8u)LNEb&f=wMiWkwKpy8Tm&waQIITbj zi8y@B{AP`mHCVmbe2QjjTc-_9a^?AMz(e*v+wFGgs$=`_`!^=0YZDNC;m6a&#!3;- zMx0pO?zo!pUGZ2rl3WfGrhVh>C;FgRUt4Q;?HCgp=*?$e6 z)|72QeqpC?N2**zRt=0Sp(3TK*75Xk?8n1ir>ut_hg^`^=OS+&Xa^4K*L+Quub~4$ zOvRBf0&dL(_!{g8i?Oc?_tYcU8-FhnDAMf=k=rJ=eZ6|cP6FnFuZ=KO|Jy_n>-neg z>qVlG5=LeHB=v89`;@Xs*< z_EnXU*gd9Ut3IwzHT*ae+HQL-A<`i3I)w6zBH8!g&!7AZC(bl*e*D{FztlU-9RG)a z_B!)wQE#iKr$v=Yrx~WibW5>A_j%l8P?}8cY22a)I z>|CjzGcg}^8}rP}Zt}a=E>Y1GNh`y%pYOHA^Ex)K)7=G!VKq!=u*mpP3_W+|X(na(0Ln$2E}QR!;7qvIB?h7G0oS2^7G zhfYC%wWL+LWfdX@ln$CH)4ZYkvZk5&pCxY9UQ?KWD z+gw>070o#EDxueq6M>6g@7q5|I2kR}wq zYF)etB$ylUA_ZAp7xnB5;rLio90RQ*Z3aqG+v1uyJ^@Nbnr>`d6Yc};2A4ErlUiJO zZvZ?r^00>jfj{OBVAuZ75Za~|F-)fIw$o=H{x;_dfFU5TKv;;Xl_FFC5bkHWYO$e) z2Osep@C@JQ@3>zeG6k_x$s}T*2**RAA<(Y%v?^))N}LaTs8$Yu0f4Bo1ix@^QC1MV zfK@AFFU|V!>*ZZ!3G>c%GxwDA07O_5fC$&id;;#cKyeZBoiK)0ZZ0_n^T_XTuQh3c zxkQGj8M}tg0RR9`=AQXRP+>t(+>XzbYd94? zS1TmEM%~$t(gEL~c&KZMdxD?>NVNBLsO54Fx-k{Y7l8*T0YU%>nzLpJY1>h><#WzC z#|z)E6>_(fRS5;93URHD3=aXu%%>(|Fb~Wn_m4DN?vplQ3g;3~*Lr2lBWsy^fox!% z@t)2RO2eW>i+s_-Q%*Uhq!^E7t-W$F8{`;8z8ac5SK-3SLfYC4kHrO#o3s zYeiTA0!*fwp~;=AXfeE}^_LVl5X=Zkgh;4iYH4m5jvE4yXtNgdDPivZG|=%kt=sMOl3#Grf!jSSa;&PhFeNB{){E`Y88f)z-^ zD1>Oph`1k^tI=|OR#HYd7laSzgFs?IYVZ*u&3(X{49k{OTIBo|iVZx)T>Sj!KQF~r z;2$h7fCN4SaM0*9(Sr4(R-)E?LnB6sVGemj>sGgpZc^npaMxmy!+)%aM%^v#>@m^E zZ|Jr02sjW2Aq^a29+?N$2tdyqYON~f5*}n8SkJ6qge-H&+G0NSk$F}g2Z-Qb2Ef#a zdn`GJ?P~hOwQ;?yZTJNlrvTEfW?X@VH=~sRGA&`=v=xEFMl2kYMT?+hfuq%-ouE}< z>T5|6tO^Db%mPHX>!{SsMm4~4?jt_W@zAsZ4}6juv`_8|O&g#OOGd-gXi1D_2#bTo zgKLB9Pr9SHMvfcqm`59H)Tat?lKG<+8jBS71MbAo-G^|+{eZR?^37;y%AvKT&$(|{ z54bxCLaW7xwvW)ldgXp6&oAj-z=}YC;g&;)VU-g10YtDKP)ZmGF2D((fnOMahgyi* zGFSU=Ys=S=N6?IMk#a07UR)YlODd_5q>b|-WHkl=Y5$aZvv|~vth>1NchtRC&pJm) zqU<5Ol)soy*0?HntP^Aa^N2iPF8PiE2=mAsA`f_kKSP%?_n&!$2S|a&D%Slw_41`E zo|+o2?#EWt>`U@4yrO}AcTT;6Acm5Xz0ZLNs2@CZxRI9X>}3yS%J*IDNbZH-(mW)*7h zoDb)vsj*ma@THbaYSe9|Nidu<79@|Anv}K5-G?Rru6Mo5D`}*U!&_Q}vX#dTWZcwN z0?3}KeU7ESRlCGt5x^AW=HoITy9Qq;8X($bBPAwqvv8AOFyb6&D45hR8=6rw*;3jU znc$}aQ7h&Tp^Z>M3&!tWE{Lx6eKshma61P zCJlNC!#yGo2NJ~Q=8lCnS?O44cmM}Lf>2<=V(NujVA8DSW&j8(>|k1MK8pbt0oPcK z8S2rpl+c{8mN>Uom5PRexfX;pf{+omY~>ibr-gYzTWWN-d{`m`YY=6E=gd7Cyw0)C zEAz)f)0oUy@08};5#-E;mJvrVvnY8~H>noUt98DW?jOPw5Mf?)&xj^|guKX{s3nJT z0eG-@5p-A^fO4`cT{GGy=L^82U@0%CQiyv;6DraigWrOm$qr*aaouQ_Nd|LR?>(vwF*Fk~pRR;IdchUF`obz>l*N2E;X^E z(p-ja<3{)@%^Wp*j7q%Abpjyaqk<>^(#Se5H`f3(@fM-yAnw`2rZb2W`e_ouG$}qn9nA6BDX7^W3=;;2F^8ML)}?nC4#YP=UR^?&?%(Zl`1?H zsKSl8J4->+cF2~%Rhr7zofbg)6FT5voy^5FfuYS8ebH+F;nGYrv;Eq;>Rlj zhk=Qak&#jRsU5O5knyR^w`{#9T*-xH7KxFOkx~0Wqg=DDF|%0Go;UyLPTe#hVq|1w zWYk?0RAWdb%+mshPhJm8GNf&fX=G$%WYik#V-x`-Rey=J0OGfCmn(BVnirSaR4PNG zk-KikXzejNfp5xvE*dp~Q9PFMRhJ(lY6ovbWkU+hQXu08(|kVdMnZmUB3tOS7+<*! 
zGszys?b;Njhsuzkj@A1b&Pwnc8WoNyt-hThEZ zg&ALU;4v~TOJ(X@p~y4BM17ee?@Z1yiti08wncu#5^-hNzMS>ZE=wdZ5>j=?stZ80 z<5n%}V>GQ1gw8Tq^-pe%+*4Z$Zlwr>m4X`U2LTlX0pKz6+%m3$Te8_zpXU2^jMDcr zF-Q!G;2wwp>Ma2U}V}|etV~+3+JTqUEq&YIC@4z<*s@jTEJ6%}q0ubM! zi5>u9)143$%Do?!A4pSzd`C0Ujc`Bd#0|;fzHd|B-M=yK9^6!L&k1m>67B4zLFcu} zEm$8>(TSILmJQa)#NFK}H*2b_xRZA8aR*J#x@kQK?&4Ccx5-oZyGdncvjEhi!W{Pq z?>s2H^Zb^`^9_I@Z#mIH90ojYw4peH#xLaLe-_FFuSJUGA&-E@3WOpz`j5>y|#R`%8)hmzv zo_8W|v3f8OrDI>=8+hgj;hjT-Zzko2En~$edw z@om89f1TWouKhsDfkinWsdN#;weK}JK2mGkmM=J2^KOHn!UM9}f4_dn-S$!uK-PJG zLljDmq6&}+Jr=PwltL?Rcum?d>M{(x)>iN+?`DZcwq%c9w|IJw>(6C(;EVK1;jrc( z$@d3DquVIFu}nbfLE)P_UMjeoHi+^e&o|@W94hi|q%N~0-{{E};-4-=!)m_>gsHuG z0g!?_TE;78{IgN46=hQ2hT?%H$Z?L`CFfqbN1t20b-+Em zSxgcHPA)J(jITQE7=>Y_*YkK3GpW}|1Vpm#ph*RH%rxN};rSz`7OmT9qJZ>zW}%gL zCaAb8R%hI=D3qtI?!!dGs!-;)3P3ih1yZdaNVP1ihTT~4&o38{xJ?%RWvhqWjq8To zT2U9VG77SgaGTZx+YCqJ4QVuvfB-bVz6MBtb6sWUU%BAy*>ugDiVXk zJueoI!9ks{q9J zztlFE$&;@IB57P$!#^=RaRkSg%Yu1uW5M0Lu4r-GFBZp&?eT{J51*|g-^ixW?9xnj zvp2+!k&5yH92wEFCW*%NikUsKczV5%tk=i8V`^^38wqO%BX_GKtsmV*1Rl8 znb_Ml47qF94xPyM)seQ@mVNssWd9`YoUz*Ld)P#=C{-W0YJ)STGeu4)zeqp#U>sL@TygV?w6~F z+@k^{YX_a{>&^PWVY!86)E%Vk!XlU{_w!`Y(hr&pcx0N+Spp}pP(9zQ9CA-@$-DI; z*?ao3@xqO{Cd!OFpQgMS!=Q-ox~H$m@2f>9{8p@yWx~5FWSl)a;|`yajng+9r)l#2 zLuI}ei-1`}CYh|0OmC(sSDDe+5x1>ZfTTxm;2`lju`qtSc1U<8@17S>dG&5Ncc54z zlaeho+T_A&mj$xUL(=;7BS52(-RPArTHhlh2JP``5dhb%8FCNG0@)O=`TpFE`8nA@%mWlD>?%SMqgZW~5@M~uE zdbif}o=n|}RruN20#dJ>+2fuSOKa)YD30+*KIp1Jds_+}2o`~bt)fdUMxOV|n?r?+ zTP=LEMHHdUZpf`4$h$Ll&A6i{MgH$aBc$L*J17WKtZOBJ_|SY1p|9$~DsR5+xL9@s zB3W<(9zR<#=&oKduBl?w zW-uw0mzzi08+{N zsAP>)BZR=iVr~3v`JnsxvH`am#Y%)gU)GpyyqN06G+1Fed7LNK$H7x`Zm<5VPg8Bo zqA9wq?s^Hg)sVYZz+=r&!O_+^n?sO}?KN{*eE^B@{(3p->sJrDjPQA1w%;ASOV;hu zpgW7O%dWi{cd&pjX~Q>(d9zgnPLHwKxFPt46!$d)&RxE0&}|lF`u%%FZf8 zG5eY|4ec1^wXI@0e8z7OoF=c?bVp2JS}uwMgwYNzsw7{H;4cF4vcjGerQ?c~Ma$#a zEqRv{j`o4__`oC2F)F3Swrj4!&4Z=vu52-7`RT2B_uF;OO%~zb6ZN{qGjgSEC$%<& z6{14P63NOH5RhLuF(W{fagT16g)f38$`?u&RwLJBygPQs2#YUj=2an6a09YxmWl;? z{lK8xRqp%Q(+h5XZ(+nH!)7S?{Qy8#w;VuLuU?&MBVjf0@O5IcBR{)rd*0o$L9CBu zL+%;Tq_PupF5XM#cb%6U7ENrL;bzwQewxqNG3q-;81`e|#ANx-tX}6`R|g1)^ptpE z$2vf@HWE^@hK+&0T$}gSDCC0A*(E?~Xjkz8Nv)~%}xKvG?yFzpJ~D?8Cwu3TBwg_V4s%vnxuieaw@ zx%!ujdB0pF40-irB@Ea3$v6uMjt?pfIlnu5?$n%HFgfRT>&?2b19Lq)w5gqS$BV)@ zB8;EkS}1Ps=ty^GreubZrL|37J|)0+gPi+~>xX=By`kR|+NCGs%bZ||FqiqFhtOy| zk?^`No++ySReUNhjXUOVVsBtZxf|EMLC7?4nLu9pxUApJQh*TG+#z@@~^$>2IIzA-G{}1dMDE!b zR92`Lpk%F3(RJRWjN7BXSn$zBR$y^lEu448!** z*tI=tV%8ltCFf@J>=-YrB`o!GD+VI>OZEuS1SVTdUk$C)PE7As`@rjqaJd>s zh!L08N0WyP!V@pF6+p@r_o@XVQ6Vas=S;_{fxO?gPwkhrLq)WRDwyg<-AyD1p*&3M z%ec9db8g=W_3g+Gd27A!3Yz;Z>+^1{07-A}@Fv8l)3xu?K9pkZHn|L6AoPKad4FAX zwH}Cf({t@}1V!fdXMH7CUgx98jc)gLqjo7BnJ6k{p=P{03uQZU4{s{Chr|k5Gl+6l zU+eC%0+6Nr_x$tE*R(z=3xeWj=svg1H}lE6=e8C62CJU9w?QSgH$3TLWeVkC$hp}F zx{1Z+OpOwyN`52YK~-AMz`0CUF^3w}@H_GIr^Wi9Y!6M*1`3V+-fq3l&Fati^k6j6 zyx~TdFPK;ghL7=}#|Zwc7X>($ir}QM=9U(DvV!o$(slsIbL9XetqUs*@zA{IG5HSw zp;tpbDr#iKjs4LG>%)Mfi`PI*Sm0S1`-+g;N34&Y%rLK{QJ6xP6qR*=KHuBuzy zN0EtpG<3G*8fAtZP`PZe0OQ~gYoXxo7E63fKAk0%2`qH3f104tY?;?RWb}?`GG(i0 zJS-&dx$%8y3O*J>8J-Ok4HXb!9~5nwhdLt#eUQ;3>32qvZaFTqStqu5(2<3tiV5xsP-nd$Pq}xsAzepJ1Y~U zyl)?p^&rZ}>g~n$7tH|3(xpqaANtL)tZ%sm0zK{jd-~}eK%=%Bt9aR4MaxJF+2tu!GR7kFFPwk;@l~Z9M7zx2FKe9=(l8eYA6ehAb9PDeJoyXJQR4(={wI z!p)W`?q8v&$8%eY@xWorEKN4n@MlO$V1DVlt?S3a=-rAR(GFHf#`mom_H7aiLE)ep z!x|Zz3(*b`R1XEAiKpxNUcIaN2?dZ!Ef&kU9>V7@31~b^e~*D6SJ$tDLdka>YMYhTt)?Ws@HD^F}8?OXVUJb0`btsxa&J%8@G=SIl*b^*v! 
z{=cW5O6|f*zE9Bn1^Sr}6|XWI4OeQ75(b~C3Xq*4%lyIkdZ0gu5Q z+NQ;s1T~P<7DIO}2X1I@#`y+HJ*DoE^>6sYawQ9EAZmbXyMS=@trPHgX{a$hlI<`P zbIx~PEbjidk|y1`VBAQ+C-xd+afH?6JoVFMuljK(ayPkP;@F1`9r&cNi^lzHk^;NED}bs`eBD zN)-1*Xmnv%FM`oKMyAvRq}b?aVy2jF7X_wxtzHIN7;!K{!z#FK!Z-QCaL#*DY3aVX z8cst-o7^y%m+Q<`C<9ceFh|AT$;EE0TwHXo5M(!|l_$o#V|)y?FyhiBQ9kd7(^1IB z-kPY?p)&GaFG)@+_;$EaNx3S-?&BJYiyRA&Qfv^Afwj7xQ*NY*83?ub@y8!`#~pWE z=_IHtU#TBR_~^|p=BTtAs_3Thwex*X&di8HiV7yho+HI2k!isHZM_J_=op#ESD~e! zeNMBB;)X}rTE~btMo}|vdx82f$C9oP@ee$ftZ2WYs=a?_4x8 zi;wstJ@#1LE-W?Gsf|<~9c#F;%zxFDl!+Ow(Y<~J-H02Q`QwchBHv-L@jZYlYXbRU z)Rq3LVTSNBvrs;O9+ecR;$Evl6G8MKrv1oh0LVl9_voXKR@55HJs&YqaXpu0otI<; z-CVfi2thI}`kBo5o~F+JL8JEKM^YY!agah;yosXbm)Mg}J~=|Rj~almbFtRmkE9hs zjIS>tTw}4!IGUwmY4*NGtu$8*q83J!bFkIZ zl}1Qu|3{%>3UBy%->@*-*k;;+fq~Hl5CH+wyza#8dFY{sO2Cjdm6xE)B)Qn!h_tlO z0;%lAGRwR>3Gmofh#Gd!kebQ{xt{DW14QFYaAaMv97%tJu6gEmpiqjxx~#30?qr0h zoM)y`F>?r4Rv&yLrg=6~zl0=~9jEZ#8O!#zTOiPb9z@$nw6I&HsrdnqlTN|jrJQSFf_1s1*4|cFKdK<|B?nF@$?Fb+<<@$E%X^g>{ zTgn6t7PRcQa~WGKU1WMhBi&eq!bl4w>8lB>4^reO_k`P{(g#F>SoVXBoOz=Fa(^WN zsRbevg|m0>jd#qtsPxZe3Oi1c8R1>;mVv{Hx$4yJ2}=8C(k`sx2NA4PoSlf`k>6qI zbs?^fJZqN2Y*8g=)(s$Wm1|O+0g!yO13=pv>JIz}EsrQpf!}dM&v4~Hgg5KKD_1$^ zdIBvwO36k6fZRWt0CKN~tOp+~>%K~YNJvw1G!~+VwOSg%w%KT;E6X-QjRylH@*VOh zIlVDo)~2i_8;gnu)qDj&Ci5acKBQv2J*>D8MT^{fVzF9rBQzKlD;95$=cdzpqZm~% zOTFwX>^RDN)bFnn3BHT)4#hOH4czY_8S36fPPwPSOTEuS*8TVUE>^Q<&8oVQ&@Yf| z)`QBFY~#QVRL59VqyXy1s8jNe_!sTOr1~V{wcCH28cW-ZU6RBR3Hy)>Y&y zJY?JqSq6jzCT53S|7q{}>eZ{=Bab{XLZ*)jfXE51lneRsnl)?A``-7y=l=9h|8$3q zPL(zlQV(gbxG#kNNZ{$f^n*Ep|+tIc|CDsro~7mEcp;(a0pa^#ep+l{*A z^w98%wkE@OVN>*&%?mOw6hT}d*2nbT#;;?$%*D#VLb2GSEt2l@WhSfd7J@)}xsMC= z%9_|+tnxkR1Cy&iWg>`Sf4|XFZw5f_Q~=S!ij`7%!^@_i-c-5R114tNya~nm$`=AK z&Wvo!R&7VRt&M+_QTjHVl@(eym{qi~ zhpRI1-##J)j@u<$%D^fYQ>^ADgeXw@{PrmB6>lq}i;%WcSG>XSO zv06zk$(2eFq+19SPz(U%&QS*tVQ>nlTtg1=-FM%e%KFgBB9OlSl&srlQqC9u@~P{E zC=pC|RriD#b(b;1J$_!mW7+n`?<8{ZP~n?n1VsAtMb}ujY7xGqPZ0&&z9Iw`P0M*L zvhLsU?hM+R7Tih!=&ej|&RDLiERP@qE7~D=9+mZY^^4U#3jmo=%)p9SDK!zlj=KWD-QXlt~(jf}E5Q8w?FRx)L)2W~S?2LGJT2 zBIKUlS}2xek2UBtJRd6K$L*4HXYShLgU&9FI~EIUMjfBh8%4uDED{SyMBJrctntY? 
zx4(dhZX!(M#}2!9t{J3FbmTT{FO;f17*64PzXTt%}0az`ZqNrFfrzHrar@_LpmoCD?J51yQL)3Ug+3V|CiDq187 zSC;Jy=}z)7KwxH2`b z;sT0MR2ZQg`ESUdP(0>}>^wmDX0Gtf#9Suei}V&qD8t7JKsUM-=YofG8r$)iy=2I`)!S15J_4f zrRuvomM_t&yZ!Qb@Dwc9tWR&Rm)Lr7-+lLuy#*rRuuL9)wsPf4cgG!fi~x^ z6LCY7IbwwzKV8hQ$yuM?i&>xdZElO&WE73GIU=jD*j?)*27lZ!+4#P#5H0!MO?khu zu%rT{2pESvwVSBwr;FBcv}hsyu`4TIh)V9Er~@PF>b}DA8W6K#7g3rHnv!*k1VHwk zm<=}mr8xJ@a6TuU=$;io#sb;uMr_hI>U8&&8wnm!p)kUIg+iSP-2VfjButW53q)SN zTzG%KNyDyAzdlmxA0$4p2L#z2%kVL?K%g7E9dySXRb5z7(1lJr&CMV+b861Le3zV0 zW6Z^ZAo7`5g?RIzx+tO%@pWfd((&Yzgu*z7aNAk|j{7zi+{*1cZal62Tf>Wm-M7R- zPMDGNLYQucL;2x!@Jxqn!B(YG!9n>qEC1~$s^jqj9*4;J@7kB%hpq8MXh7D*DzU(x z++1+Ww-v3AQtDgA7DsowuY%{=e9gt-5Lr83=@#qzFaeQWa@o>NP8&~z?z!il5wd%% z0p!+70Fu-klaC}1;J$jr%$z%MM%MMoq~(VSA`GGtyNoT8K7M>zBJ12Un?z&Y8hHU& zXuw*avmtvXj+xfuPTj3np0mKBidC;W1xyEHxgbWjJ4OKTq?x^L&%SJ_SA4z1T$0)L z480dy!Yz)|qN$&5QpERVBx?{RzdH7>dX0J*gj0OV%= zyZ!dt{YYCIOG`7Ld>tmH*sBCYPL?4`r5oH@@Kr^lOcVr0qteDgvp%}W3JIRmvw*Vh zxLh9Ely^^WE0k(MrB2n3TYVB&IYhLuGj`4SI0ALD1_q$OlDiL))?7t*s7VFkZu$!jGSy&XsUgq(OBtR8DE8f(8p@IzYmGv zdsKjApfIeVXI$+9no$4?GsNs~fB=X&WU9tHhr!d~EYY+7svGm2k5x<`xKajkkwxy|tnn zs~m_#!D6Q%*eRk(zF{v}T!OTHB54M;<%^a`9CWul-tM+UeE(({+vNGv0v?Zu@LM<7 z+^>=kf0}6XXV20<6DEJd-hE;<^|-nH znKHnWj#%}ZMY(!Je)Ez=H8(1NBwbZumVmz2B5;u#uF9IrICoziop4H9`fI_TMO65dyED|{ozSnk~2>+E4c zA1Cckk@t?BmUFKc@K~~YuTR+>ar~6RAQN@ppWn$=J1)~zea>`t;khRy35HSm+Y zLi918&lmR#WV52NId{S|SrjhkC+?n2k$Y-u}5{)wJdU~?{bJ7)$PSz}V?XiV`R%8-*BKp4=t~Di+8VnX@5UI%eV^WF{6%*^w1scUXb7 z`~*?dTpKq#MmhmZ7M0w%m*igF@lwGZJf+|coD%uHK>eA_2!N>7GbD;NJNO9y@V*nB zo7qcgkeu6V^`N`y#k}xM2 z%j)IRbM6mj_KF3PEu~#5uV+(O48!YUIL7g~fX8)WUEQ{!7(eRoi#H+qg_CXECO6(~ z-!b9}h*8`)DvDztBv!C(At#E*WSWYJ0yMub=Z>11b;nKX@hQ90dom+6h)lNhvz7Y9 zRHC?eqRQ%y1CZYXAfNiwr`&)4_kY*r$_jU!m!4*ezT=|>f$;+#I7vX{rAS7>J+#HS z&7^E*i)$v^Q*2IUB^3@L5;vY#eycq zI|!3Coj~;2&we(P)g1>QACqy>O*h@-o_gvjw_w479Y7-qBq4}o<60L?ut(LXXo(0I zyGf!|PLcQK$m0>RC?DADd>)yb+oe!FuIZZ5dHHWqxQ*7NfE z#Q_lrL(Z3TATu=Y+{pbRshUE!M%l1qr2D?#3nl-p-&SxptsQcEut7?0Hq~~^xjkzG ziHr9a74zc}-t$D5&yhuW=#-rAx$&S_A$Px&cMk~PJR`?lx2@n)@CTzJ-)OOhfn&ivEd2hhWrObK zb$P#;CflpYB*s^rc#LeM&BKb-b9>{>ukZ~@&Ai^o9n|mK!TkV;bGv7Wn+=!ZANI^- zT2h+l=9_Q!MHGcDZ>Il8J1r3C2QoZ>yzOmotK!#nJAlUsZL_r7cK9=1L6qn85&^0A z?CWuhXJmXYgcr6uU*VP1MvBVOv}m0yGIHg&7vg+{VGys*7?~y+E7O9{>`cd?#D@si z^&+*uDcbeazKm}gh3~gjo*(2a#S5ukN^I?vnLR!|{As@hBV~7J z$Yt_{L^bBtW(oozfsAe$wHiRK_kP;n{mxf>JmQEWDri=jia=7*+v0s9nlFu8OM{5e48BnmUtoy%Dfq$!6XNyzqF!&vO}u*X25-Fa%V(5G2v3DY0a4zF2#P2oPeB$0 zVNp;PVF7nnb{Dwy7hOwjZB=*AyL;yR-*>iqW@n~mx~tFM=bSp{G~wI2d~Tw>K{-E> z75p_vt3K~|l}k1!K})kYZrsQaT2OU#APIyY*ggEfPAx7j9<+4nQY(Sj$U}Wx{lbe> zRq;iPH;H#5?F?t$M5nXw4&}L;1!d8k(9?1kg9|WT6=0qZr zXvNDJv<}~(-BpK)6DP9kufN{ywVqok!iA-6rKqoy@6Cx&jgxR8;(8&b^qWM?c1*hN zQEJ|m(#=xP2}HK#7G$B225zb=Dk_AOmzRreGUqvNp)bCKNk8z16k7T*908#02$&w{($BrFV zSCoFIX>v*Vb!(a2U<3kzkS+^E-LLMtI=M|I3JVJvR>Z10ITQrK57-*MS&W3j_uqd% zssrs66Y+X2fj}Uni#XJMHTH2bkbaUplC86_8WuP3(8Z2U4hMla7=u5OK-`plG!};% zKzAb*vO)+1Lb|LEU1zD}Atl)WfvCDTYy>h-63F7ki(L=2&fsn&VloTK3Ly{(L9jkd zZ_uhL}(r~jt+*sYqDj_R`Kp>==JWMyZCK5?D zKKS4RwRP>GFj^qMtM~%~*|cd>%od23iRcv%Arc`F2+1Z9+s&=+x=OY_Dkvxr1fuHT zP!b3~P{;?F^X<3avPFv)c^-T>5{YFM77+)5KuCr4q3bf6<&la8PzW3B-n|>Ua~vHU zP6Bc8GX8)-ii(P&&P-2)kQG885YlOV$ZfLq(7t{9MEZoPdqYhi{D95po7HR9tYJ?) 
z@r204bhAKAR*2~&+2S5<0d*n~0)dc9>qB*2rJ`)fh!G=1AzSENNB4%CKpgbtQi#=9 z^XJd^WOAKEyz)VGbCGgH2n0eptdDe&hxGC|v5!JW=O!IFDLTkSeDfmqK_GrD1zS9W zR%H~MIaDTO&)G> zk5nfO!tz+SaA7rFn^6S95AH6$`4=isf<(MbLO0TgH=s@=LLd-QK^}BQ;Qh*vCxNQ6KjB$7~U`5vjVKG1DYabJx}rPh-#+G)DNwP;XfEc!z3npvMRHjMr$48TL;VhD3>t&R@MCSL?YDJ3B`fd7)6d55?^wBVrMj~9`BW?O zly?s02lWfSnRej70XApO99MTzEPX*t^=E7@EplZM2!v>?4_!C8xvx^8S3n-c#l_g2 z=IGYUCJ+bD;m_>Zv#VkuCUP*Dj&3C4wIa)Fc`p$Ofk1F`U732Eo8lg+ihaP7qw3VG zAP|Qs^TC|&zWc7H$>_)O@MDGe$qgYAArJ^Q^3X}d)aS!gZ6g`LVx)ry4`Mez9o9!W zEfC;O(h70wT4+=7URH*UJlq;m5s4581UFq{N)}X!1Ck2;fqGYp_3@{49iyByo}2={ znN(6z!rpo39bZdsqltsqtP;Nk7l}j&1R{@Uk_A)cx&jbCE-fv^?j%Q-W*31t_yhjH zjRg|1oyb^-#4VWaw_X;J2!W8j5tmT?=Q;n#B$u zKJ0o@y;fzpSsprhsIRq=6@fs=ZX)qxd8E>H1sw_UP`V8krt=s@(ycHfkw_!HLDQ4g z&ph)C8$5V$jT50K7pl+May+D9Ew!7-*O>A}+{P!wj=et}XXkxdTI2bd9qO_^Epq7q z(eX>W_2HK|kSr)A6?o%~H`sH}i3L|ESJH)lzv6TruiOBfuwt&DpX9q!;l{e=nrm2_ zHf?IWPED*aDR-|VLbqf^DVQv^YSl85lOIqrmQS-c=a}tgqS|NDu`<8k@A0E!72RuO z#R}66tX{1`Cl>wllJPqV3ky}()syKakF>k6loOZFH)~g~Ud@IK8DcaeZO=uM8)A!n zxFs2?V-N_0?65%OzR3lij)Xzrp@$w~Xv_y)>FCU~BS`ix3S*UmWxcY}<=uea6Q|b2 zGS!z+YiQ{ciQHENLiQ1dTYFm7g_WvRSHfqtYnQ4k1A&|Xoxn1_`3U=pndV9e(OMyX zD)stluS*~hGLJmGbVj;g*Jr;i+6Nd#iRRQ#DfeLCB_cdzvXdyz@3+3H?Pdx=B{ zgv=!oKUPPoS6u-`MMZ4Jj2YE*BoT-i#et3ll)?BbL&MM)Ki^6$Uf^s zee4$ZkZ!9~G>MYH4fRJK3FpjqM@JHYSWadM-wek-2n3~y{oXlvnWAp4EE}2F%v=J2 zkU8WLZOkK834_3ApM9oUAHy9T85raS@TAU*L?VlJ?%df2By!0mmsBlhGbwMcfVylX zrQThrL38y^LN4Jmk%ij4N^S}1!}ML{Y**Pa_WN(EufK0sJHq~Wb{*C#FNa;;IQILa z4aG;#l~i*qzODS@+Xxv9s&>p9c%~RItO9$Bmx}{IW?d>&-v2 zce7m9{FEFz$tflelhu)~LWuyzgZ066As#M*ZVcLS3~JWVqg>W==B?Mgdv|vC-FI7+ zwwsc-z1C)as$?TqY^iIp^~NzoBB{r?uaB_b>?pVXABuVKS4SL0pTnMNdkX8^sFv@~ z|N6(H?2iRU*z`m804e?_|Nj&xk|#Ty!s_QnTt4#{`^}eSY+0$h+vBw#Dupoq{hvRp zHha9?DRjb}tRqF2VbjCrIo?jgM}GiT1kZmFXa1BaY|p5SeKGYtFFrcGmCe)*-< z)Qq-LuUjl6mY(Y5(n<~X{&b8D{=C$cJZdWXW9;%ZrNUbAgBANKS*OoRUCBeo3wtZs zUHp6D``HL-!1|v&$wSBQzo}sVwW*9we2UywZY4_Oek$W0n>TM}Z@&4ad^pU}i9{e# zoajk>vugeN^@2paCWTv*C_k}~c!`9NJd{)(XG7MOMWxtPz#{P?5h(iWH^io}K@Pv$ z_>5pP#P= zl4_8H7nwx6D$8#>T(U+|ga6&a@8HX7-u+#bm&4xfcnT}MwtbIRQWE>-c6Y3YCFG3JMB*<%0Mn5?gD^SS=BzP=&cC8Q zyRl{Ms^<`gc<|i1LLr;*1uQbzIZm_s5AT0~E$6CC3;{a)J^PQbW3wZUjUGMoY&CDT&caDWPTk&~s$7*GK{2S?Ru9MPtw=d7%<((c8i+&QVqc?UUWE6EW*G^C{ z10;*ms(3;{DXid&@fUt8^d+N_tl@sC#jzx5J}B3ne!kEnkL3T#s?IxBI}{9tzKnlu z(!a=E*nKpMinihZQ6wB-->Tmolku9FV5{dYU%d|0t}*0ytb`j}API>}Rw#4?Ez~^6 z#Iu4s1P1T+qV>Cz4(|E8uznDx#8v)~04f%_oI>EX;PwSkCktTjJ*e zPL+5FRP5DW>xeA=J^SujY)-Ya+Tu!A8>EVP{BzW`r5iP~n)K)Dwc+TdE=f% z2h5yeZWXlE4VrVO6($EEuTlVAzR`K9i^beFPSMoVR69X=2bnDrCs?03OSB}Wlz)j< zy%zC{)knqHZ;%|0$G)PME%L7RjrLPFRXd#5eA_tf6HqL{FnMo;iKqD3(C_yH@lHOT zmVVsI>}P-2RkvoDf=GOI{eQyACk{6g{bQG@>6E#Cb;P~ZYKNKR zyUYw}QJPKSI6B3ufgIHYRcK6RiTZAFXFH$m5c!4^)xi0epu4|x)sg)27#^eePb~da zB@N0#rU94k^?g-8Mw~y(sP}Mo>-j!XWI5(gN$wOH$u^i~+}Y$!ke3Um_-cr8gfK?X zevz&wmD24B*o`xuI!l1+9ZD0hROZIQqBM({yGm^vIIR644}!mbcwMU7NXq+FA& z4c)0*3vsJFUP}A=oq0A_$^LM;*IDDKRC4$L*+E~B2#&UmaX>?W=S<_w2UQl5B={;o zEzikhX_M4vE%Qw?h{8)B2KJvca!HNGYw=gko~U~K!zVtGy2zH`@!BrPy}vrLvO~VQWO#1unBCZIP=n}uA!ZV z(ht0)I>EQ&AJjN#0@tyDXiXjHO5C|F33PCb?nmNyQI9^W0a|uc5j(^6o zF?bSK8Z_wAy!TomB^3IEElVO_^Ma+1c`3wxriO2~cm3<{Y@@CQb!jyGU?KUHP?9)J zNy#Zq;I_K~P7g_zKvWwoFz{T8yw4QHP6>O54Uzu+DfEv0ini%5|4s>{Eg5>H=J1Vt z5o%IR=u^(8qIBra-V-t2s-BSiIc8DR`jn7GJE#J|PeuI`g@Z&O=pQh@tc3h#kqtB~ zk-8N{xH7MWoeT*OFN<-%^MZg;#UbmLw(h`mo9Bb=*X`^!nLkjr4y57uGkJq*d(Z8h zzH%;$cnGr>Xznl&6BOgpK#p;B)62)D5mFaChDOW|dQ7gZtxdie$-X7$G#BzZAz*B! 
zPEZr0`~bZh=3C4OFWl2hA>Fb>N7*YO5-lbC{FzUmBJW5RF%fQB=Q~zkAcq9B)IkGT zs`&du02HgC_-IWif-rI})`fNWVc3b(quR}*GW&Kust#`kec=)9%Q<83s;-K`Q*^s0 zUo86gBz(o=J)?;F*zr=UQWJAS6c}+2Q5nJW67+dBmQ^aV+WuGk$sWF?t5;R~Ft)@g zr-U$xZ%dL6+WQ49`{Y@a_xpj(k4WxS{wND~8t(N%d3v>$96lrgjYX9K3KMv87?5#T z=;z^UGu9IMC4h9(Zo}%t;+M09fl&Snp3NotT-VHefny0C$ung&XjMsjJT2$*3-RH5 zWDUJCQtQR4!%g@9+fUDmyXBh@u>V`8?Kt9b(#iCbFXBv6(|Z0bC`F2sYE{dj>zG+1PLHJCfo-}y zMFl^e@jw6f)41t8gNbx-PQm<lvsGJF#S7RMgTFmCN}zrH+V!}PaCTjunc@U63^LM6VM z)87>g*h_`;HRJD63MLa#)?I%N@d3M~=EO5&1Kx|nWlzT4^X5ByyEkII6s74u)86WS zww|vlCW2G_cC^xWrqg`?HvPm44ax5eBA9edY7=*bjRQzqPkp_P0F{t1+zhp4z2RQ( zwpfl+&70hoCi-Mo?Rhjv z`|YU;QW1lu_yl{7YWwjE$(>4^6+5 z?y1k>5&ZD-`a*-P_ajryAs7QyDKaKvV>HqA|KpkOU^A{yg2-657?u(dZHNFPK-5mR z@U!nO0Xk>?WoUvd+`p%TPVLj#9B!OC`1Q(1z|dhZyxf(PgWB zboxNLjzL+|DT7hDxy*B9(?A|=6i2q1bc2!x%nI&Y$8qbU82F>H?uM}~2cZe^p{B0M zz~I|U8`GP^967^zUVUx zlcV<*K6gpF!IeD5?W{grFR@;m8?XIdXje6IPtpM`s&=NGhy0<*xa0g10KX44xNjwE zt8@;~=Do_^QVQe7t@Bjo%h0kV^s>)yAG&pm2MXCufDbjr$mbI`)=)-bQ^vh3{YgA} zk<{D8z3Q9tz~SFN#LY)mN?nPQ$mR;Uhd2yW$6~NC*ivUV^)B{!zQ}NVg~(#siLv3* zL59bCFH68bXRT8-{Qg#lT0OiPV7Z;9{~*_LRatJHZ!y{V#$Q4$g=e3~%K#cd1XBkc zFUfsHp1Gads_|h3Zw|$R82`HdtYQK`p2>w6v;q864k9` z90K7y)Koc5t?bJa?<)y5hZyVjJPf#4Nx47C<{2)=m2!aJx308Fr%! z-XoLfaf8&JoM+L>8{d&>3;7k47y6DJ1y8@8B5_$9d#8wSMQ{Np#a!Q8BMXiaO5UNn z42cqy#Wk|6zQh^s3X~&k`ES|GUk0MlnTnNWLpkI)5~}b?dF+ZOb@}yV%azVptx|VrL77jj&<`yoc_fuoo)$3d z0W{23CrOG&p86QRup zqiX8EShv2Zr>T~RoQdT7IcjETfVND9(L3e?CSq~N*rK$F1+4~Gyfzb$fzD)sLz2#l zQa0gt=^q~Y-_<3SEuUifA8>J5#HC@BHJsJF*py{8{}yg}A->S+V|Z$`4?m@m1`R9@ zpIstAA7}O6aGic4x|B#PY$zA=rt*rr1&!h$3NjhYQvl?cDIoK-{f1Kc^{==AA0>6c z6xpL^mN-bbL^cOSt+ZF1d(1Xs{rD*G!kOc`Iq5NVxb&jd^lKZ>CEKqVoQhWK!=DB8 z+SC#wcAp`HMN-V*BYM7C-|^dw9cZS}(7ZL&hglu(pU8UFG0RYV;MIyUvMu!2@K7)q z;f@eYa@_5D3SM3aV=cv1WX6!sp%JFeP!t@e6fNeQAFFimYW1FQ6mc9D!{|JvjLW}Z zm5~6RPge1_fM`c@@nK#m4vdgQxbWZ`qb{#5|B^#(CreFWi4n?(Km;Vss6Se9ER$V$ zl8$Ts$9*{{RvqTeAEpk&bh2@*cfSFW8iFKV4W@Hl`x$YbJwy^INktqlr=8V}bu3XZ ziSyVWtMF#p$oyy%qF?ki3g;sx(<^q=fbPZW~jF!;@0O_2HUI<>dPU z-=DNQ6$9MWE>6|d*2ANtl@~NyFd|Y=#PJH_xtbl@f>Tc%+xBKr8AZBa1upFB-tgb5 zv_%9r7OjHFoqZwlE=z)!^#K&rhBf|kfUgX!()0a< zZU+TwbY7@0E{OXE)y!4f(Fe~0&@k+u%);lIq;dhp(2y!cRcB~ZNhyL-$3lx=$#5+X z>AW6FG42^=1UFB8=yhl%!y@jnOY;t#GIOyvi+&Ws8~4co4i5aW=t4}h_PZ_k7vRbe z`E)a?u|#Z=J-RjB6c0V=wqi!u++nm$?UAb%&SmVQVM-yQ*PV(P@4>PE%2i5O7HC~e z5kL3mvgLut!KizSZWNpxh1geox2MG$<-|8C`5y{rAXxP7>EO5xQhZw|l!rB?&TmTpOQQ@Vl zJMNat;pHEJ%989#s&{-B8UqV)W#mT{hP4>xG%(5{{F5DQa#pIn*YQ1))-$Qd;(H#!Ai+D^nwgY=JZTK~i?}*d(33$A7BJ ze|1Ds_zXt)?qzX-vdzN0ze+VtHf3&;kjomrHSJ`}>Zhrk10K7#vXu@0h)r~I;QblY4?_wm+Q<_#k z&vy3tpsQG4pFKwVT)6TfRR{|n{1-Z-4^++IhIV2-!(POIJ^(eg4^q+7!QhbALgh~O z;3i6zoCoaTxn?c?_cdU`D)R0YUf-4eJ?>IQ8+exqu5(T%8rt+xV)L_-)YfK?KLRXg z@S;;Bj3}y3LOE|TLK^()yVoOp{^48weAPTh~97j0+df zWF3NzQlMcW>xl~|zDM-@CI{z0{cS45O$ZB$9lm$}(QQTIAfj-Ja^70{Ne%1VTjA2+ z18UM@9DQ9?7c&YBI-sf^MRD;mu-jV(K7;_odeH{3e-ZM+|Eb9d&~axx5N^3(T}_Ns zxIVc-gU{{i5PyH^J@`ju6Rrd`cB76Y6zv{ec4`fN1LAu+5Nmg3Qna(JU>1jvD)j+a} z%s98-Y1fM`A}k_r#uWA0fhe|xo~akd&5b^^Y+ckZH^;(a7Z4?KJvM_L$Q!{IYi0D z7pWAyv#l#o4<^dGWzxihd29GHF_%08yM-kU4NW-^88_qYmp3w?0Ck!z3npx(nz#ww zT&se}?-R{Z@sPlb7bcY}!t~7igl;Zd6#&jN?&7;WyK~=GeiqdquQcR^mp+93ty)PbCA71C|lDIIt)H7#R-~i4Ot#X8yI|59>QAUdCZe*+mh+?wGS0 zjdsMmL>3Z6fyED%G~szRLz%L}+mSSmtFhQGYCO8dkRXsWM*uNtz)~IWvbW`36Cn^B z5~PlBpNjaRdsaccvV6<1#=h74c)g7UcF@M zwkzs%nF|h@iDR{cy3tqlr8f`UxO*%`GzySW~^M z#5x!e`zcKhz&o~)mlt|2Snl$MEoH@+WIOMdDvXFNH$nANlebHKx{4#eN8z=?>@pxm z&+n=0l>pZ0k4*aYx7m593{&TE3InRKrK}nj12Wpi3DPl8E z4+GRbKBmgk7JIwhGhOS5$~F1+Hv63205-p;;M{&A-0&IIfMxFE2GT_SWA#6m@tcA3 
z^VAo&C!4K(v(<$Hv$5cy=~UylKCir-odIr*bdp**R9YI^bN&|^vrt0S48xOARU@zi~y`SXfD4fe0$GV;}3EI35EJb?_pv+h9T@?_{& zJMF~J9X}XkIlnas7es4*m*Fww*U8jGnVDH5g%yyCgRBW}TV7p_6f@U6kH05P5EmIq zrfXop^%A0a8aF!noXY3M1q!2Nr8AhBYCi$>%aFP7NyAtOv|3LpYqwVFCcz&^vq ziN5T$L>_8sKD{TYG3b_?$8G~UF6#JR)s1sxmMOb)Ja`^%7G>@BmK4~|vfiX$Zs8xh zg06}=jTu~J;C&l|HH7qy{2I6h<2T|#S1nX)?9Mo*mFEj!Up4EPM6ph%)P5O$l61b& zZxy^`uSj^q;r=Cl3N~TVARs{l98#=6!AwL1xv4uRV4`@L~rCu0KOqa?46E|oP8`X4SC52647 diff --git a/openpype/resources/app_icons/celaction_remotel.png b/openpype/resources/app_icons/celaction_remotel.png deleted file mode 100644 index 320e8173eb90df65e0363f8784e4f034da0c76b8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 36400 zcmaHSWmKHa((T}byOUuU+&wr98Z=0P26sYm_rcxWHAaA7L4(TxAy^VTxRanEINXQ# zob!G6$31t}VvW$%UEN(>wQE;Dv06``;N#HXfIuL86=ek-5C|pZ;SY=n{DSCuJPN!~ zdny`uK6A14^s#Wa0m)jsSlU2UoGk2YbZjiF{oICaBtalF7Ki5so(3A~;#MwBJQfda zczm5)0cj9OQpVTS!phOc6KZK==in>_J8bWQK^?56VEV!ud>XFuHuet6{_Zx<{GUFz z@^`cnvxdn?LnVF1fexH(JT0KUPOqFj#C@e;|MV*ke17|aV5XlOy@ zUEFP;!aR?;t@wq6p(0{D{Er2N#KgFu0(|_UynLd({DR#4kHv-f#03PP|9-)M(cGv{yi4(O$ui3>FFxY%j@Ig!{Z~!*$bD8%}%gM&s^FOlI|0^r} zUu7T6!O0aES;5BL!P~}K$=$^X`p>k*9sav60{^wYf6H3`cU=Vkt1K^|4DZ9{{$HE@ zpPv8=dieN1rVD)eALF-i1}xqkFm0J3B3uwCu0%ya_POu;uR^R@Cf&?)$0!tQ(6E>$ zR1nW$kRjGO3WZuLjEkBWLwlxi?+T?oYCxFxgK`F)LP|lnLMQ`)*ys?{PZ?(3efDAb z_1J#ym+HOBy~=~V1COHZ&PPiJHI*;6i|$p<3oPz0kTNjFhgW%-(Bhl!p@$c!|G&Te zKmYlT)jmQaRJ&`k)}k*fi9SntaP8QD3lcwQRqD%?CdorZC@oSoM4xJtS)Igu%6}1-D%}B^Pu88nv(7?17V2O z8b61hF*78+zO(tej+&wxPpfs~#aeuO-K=H2VmwcqedAFtZhPH?9IhfRPm6iupDDE` z^cUS&WtZE<%Ti+!ho2`GJ;r|z8hd;t$EUw5iax);JG?LJy1zY~T)z7&B64?<-K7w6 zcUoNTv*gF>zhB*4Mrjtv?|=GLq+Ddlm(lq~jl(bxcMyBF1vhS`gi|`rzJh4%HP`Z*arc$MJdw78oK~FPz0h zvP`@Uv2gH+Z%|PkGPVVO2b)Qs_Tuuk90N)!j!R!x(T+==CeVs32R5L+#xwVmgd`8H zTPS^YE*7fc7Xb0oDAaZj2=PGn2dxTytH_h_Z^kX8s@nD{Cv*6%$|lR7wDZ6G{?0yA zSC1MswlX4s6~q(|%E}eAQxdctjVbcqV;3n3JQ?0OM$-FGs3Q)Nkn+blkmT$2WBBJR zGob?kV2r&d69&vEu~O@G`S&4k%S7Snm}y}7?144#2|ov=I^&)@L(ueRt&*j-2FsJB zX{2tB`X(8ZCD*!{;ay2cF@)OZ8atX9YH9@48HYI+t&DmMGU|Ai4YDwDAEQ~-e$Y1A z;C7iI0Kj`YuPAC>6=Ai37n9@$$(@XLo4%z6t zNd572JApG151x>RL!~Slw$RW0I3CG!b>C7rZ59$FLLj;VqTD%nvbt1lMWiWq_WfCT zz(Fe#f6)SR%K{NWt|$2}LOx#K3)TofOq0c)i(2MYngM0>-aU82sKBHWt&@<@<5_dG zY;2q7GOK^vt`FMF0c{=aCGT%e?~qAqRT(d=sE^XV=B1*N7$V3in z@GJ}Do}?q2e-rFz8PDAmlO%m*PdU!Fx)g%+z1n?S4s3=%+wmZ6;g7PI(Hg-uHw`Y{ zyO$sOAC4AQ(%D+{@3p!RNxk(BMG8ZvdBEt)tDELI=H0(=m)BV#^Hv@qtR#>7Nhi-{ z%;-&getJ!QnlYvA_&e`-Qioh~QVngf#xJ23ZP;cK7fcX+Gibh6-<4dH&GVZ+&zUH?(u*$E60T{b2*L{NB}F7j)z5 zE9tFn`b6SaZQn%6^B8PX38fF?TDF^UoHbN5s!_%i$yVz#`tvUZfvu)rDXHX+_YX4t z*s>NknZf?7?2*qRf1adYJ+`8R(yo#S6O+byCsZcMie&;WsXtNTz`UV--|Zya*O=-{j@>I^ z8o>4mTqoq$KOBo{E%nWd?pCB5WB()7mLxl#9!?-WWXK%%GzrOHSLL^;_ZHY%k5H@M z7pwcnv+*-ztczYVCgty+y=~*S?2k#pA(a?3hlC@pBPmUH1~Dj&SK?$?JiR45Nxu|e zIeh{~6UG(oq)z_nr!gai2oK$~kef`JthO?b1;Oydg;_jJCGWl=`a%8g+PizAWcHqE zzwH0yd<#U614Q_9(8L`pO1WNu_w^89+kgGmZ*<>3_km?^Z9hlLpPjMTncgz`BG-s)P@gF*h5Q-T#r6f z8piC#pfG^W1O8%hf+8B5OG4>ea?jXbk8!D)M48)N%@g9sOu^h)DjyeKih%elDJ0om zU3BS99;7?2~hF6TQ;%s z5NRd(5hbDHd}$#x+TpWzLa-Pn)P35}t`$EP`zK{`qSfV}Bo!1iqH@a;{*XVWy2e!m zG@`AhfbFqLA7kq?&pga&ISDv7E(&jpgWJ$Z{Ii%8!Z}BV?*+1SU2;A7K>Q<^gq7EW<$|Q;ETD`gI_(B|jD0Bzy4WDog$nkm$kseb00VCNvz6Zhw%Fb};fKCOwrpd+z6GBuS8DGkeRT0i0rdNl;R_ zZztda6|v|8YdPgaW{f>@`Rc4@`E_;VQtBzy`A?r>v{cfkq6}C2X7_tbamYK{L@LA|L0>xh zj+v0f*PE!2mg&Q3`dSl2GEvn zEtrSn)47`a<@_^|5&FLD7oS-kyDz#S70(ytpZtt@^ebTS(Cn4Y5U4`GC%oxZ&F;nB z^&x%G)wklL$BVA=q%7We5=xPvvf}lTUJ~tGqip1TbC{d(Bsf28zB@dtK3(6);q^{! 
zAAyI}#!#wrC>1sJ4|!{$H5yZ^MB%dH`E{FZN>r~Ma5Mt)JKf z5uKWUP1XaYY)Y;w~!6lm@65g-=d@H^ax*FCDwj{N( zem?I1<*b$dcIU}ta>&hk42f)&#qQo?6gG+c1$3i=aHf77iSWL`bE zK4Sfw<`d&RSBxo+(1c|tS@PGly6Lib9-7ZysXYAuJ+;# zo8#L?p=H2Y7ufw;a?FCxbeH_u9)k7t@ph5Gt}RGM#8*9zz9ummcQBDwWGobmS>;vf z0?@%%Od_#CYP*6}%Zy)^l&@lu)5mr$BJA14qmjCJlFIMf@Q*-z=AGAvUCVoh)YM)s z8WeW~2)XEShi`9MP4*=<$$A&kgq^KHI%4H(yd4qSW&jo}e}IQc)20DV`BaSMn5Zf= zwHMPvMvm3wLFetuUZxQdJax0m!S#hZmpP}E7ZxupADLZ_=r6LBUq0+T8Uz(jafW&M zMd0y3xl73P0`Y!6{2K6JO;2tl;`{F#)VUkr^OmE;$1P|_C7P%FNIKG>vkzv%M|=W% zD4Z2LK3x|Byy8%5;kdv1lTB4Y)`+jK20t4TNoAH&BrV1JRLPQ?Agr2ydN;)K3%rEx znzzv#oZlaIZOe24K)8_E{jlTw=$eV!D`b=puXm+YRv#n%c1_j2H%U)K-@sE_ws6@(654;vd;Ts>Zqee~fla9g6a zyKiXd9r1{n!@i=-@&jf7u%Gz6Mf4-zRyi1H!YHc4Fv78)&}YCZSjTg2+aj+F@C1{z z$Zk|(BJ9=%ISS>h5Boh>^a(6a3x>WmuhnVa+%d8+e??V6BlI6#Ys(8+4m#&XhbSp2 zeXWHzlEcPtk)IVR?;f0U(HMP)zP#h3hZzyASLY*;;Fm^0`!AO3Rrm$=-1yk#9F}tK z0c>yP*^PpAUixPQvA8w?i3F`374;c)N`)cYoz)PD;mh~0>0;>IhZs(# zJ1}8r1khOaz3%Y4M}Rkf-q=r~Haj9Hu!nX8Mp2Q-$DgaaOLPhB!%7YHw!?o#k%f~Z z@dfS3;8oqgt4(U^Gh!7My?Ld+s#{p6Fl?M1P1FA#o3#LXZ<##giI&oXk}OPeL?t_l z)=Zui{aWtfxZL{S(JL_hXa=Y3Y}26;mHrxzWi1+damWG{aAogA+w8TB_LIC5@pmbX z{?Y{N>s{1SQVQxg2oF8eDv{z{9lL4{xo)<|USG#Mg(gWpWvD);sv(qHmW9_MaUv(q{V3nLjoqa>b%+ivMcSMuPgbLZNkZ3)RU^-ub9iV7z7fR8 zn^U2CT*zk;w*N{r`11lpfFV^16Dl>z)%eEqYsrzWk**1l0mHJCCr$QWIJesTiR2Ar zFtW+i^>thbc1f1_oSbW|dQ*$##4(&zYpVrVm>*Xf3_)C$9Q>zza$EYm)jm~`Vs#S| z%OwEXWq8xA>4iPMYgY4e7C6+@nFM8TL!%C-=k1Dv${3SG<(Ar*QdNNQyQ#|8qaFS3 zHhDwlP-0R6_X$>zGuI(Ap0b1)FcjbZvfJGJ6#Gg5c&iHA*7}@1i_N#{#(q`bScr*bvg#9v*0U+*HiUub1s|}_A zWZJSAtFRvkoC$FT42H62_3-*Z8poC(*wCBcFC#QdPt>~~Yh`;W@-#gt^tL)hsBEoy zWMqB`RAGo$YkT9$K8`}TMh~okl45m=w+Qu{$)bRR-L^s&PX}0of5?)5M?pX!BlxK{ zlbg^FuaJvAdL_muNsZ(d*4EEsQX(^6&TKAkN?nG$>x#K64jvD=`@7)Mj#~4!(3^(0 zIBtm(<=&fj+)#$>rVV^#o-HN;r!+7RU5T!G8wU6T3#9!S^)6{yk3tu+q&Ox}1|NP= z(`4%Fa(B6%Od$1VVkg(GDAu`UHEJ^U*ZiC8NEgla+O_=C(+i7zAT?<#a17IZP~B-L zW3e2s8pwQWm=E4*m;1JQ!TRvvZLZ~AG%|D}iPp&Njp6jf>%@cv4iqAK^?GS~|02bQ zp34<#;)Oi(HN!lRQ;-cqJm1>Vydu zUYg)P4qu4rJ9yfLO9Sk=7l~wLTr`~Np3ZkA^yH*KVUQZ}UBPb9N}6Ww!wKkNTA#vW zf4pnm#&)SjboFCeTSLZvdHdk= zgZ9Id>5GewYkYVmN=iVk)}kH~67cH%a&4jx5Fnx+m5=h@nC!k{`hFOVh#23|h^X$C z|0jxgo(!{|{^`89JccC3ly|-7L+5hHijjPqH`sOarAq6+8(5HWBZ#_=2Z&-VRs+p zKdhxmvz#1H6|6gzA|UHr<~UYUVI7&gy&DzFB?Mx3Er0jik8T_#*%}6zNCJ&TWJD1q z1~I+z9}jUVR2cT7>>&lTlmdmwk<^xIE}I!(2^~N>{fWBM)j%#BpxA!cDZ%@MMPL%d zgv6g`2%fba&Ea>B-U5gc8)HXn>+kl^<7G}9ItDfLIV8bZroL%z_5xEq(0u*PD7{Qe zA_R-h72XJJAnjYgHqIi*VfWvX6^D}()eA(mM+)Fb7J(@cQ{>-d3gK1ZLUp%8ZaCG5t8F&!VYH}uZu>2G*jWS zR#tj!OOU5C!c??ay%z2Gjpe(l@GKBifUNJ4ylU=FPk@TlhtWL4lY6b|Dy`+S&1^H% zyAH3LB`@31QI+NG@zf^}hss1?qsEP3S5khEtEZ5uk`utoS`ZjIfKJAT`?);MYTjX3 zSp!p#UrEJ)`Jw%CFfIM*y}(^osQAaUuu7?=K1yT-lZ#F!FnRmcGQE_4quc9;9?@b6 z=@hvL>6Lw-(jEdC@5qZ~HETPckL?TmDZV7LP>-D?i36&_;H%w9Ib3e*MSEa~XJm1a zJ2}dFgv6J@ddoz9fe&=uc+i2zq}NO<(U9Bmb@9QLSMrX4Zxz@VF=9X;&sDFyXUC1O zL)HR#JI@~v{d1GF?^h*w(W&2UE>X;oj}X=FGRYSVjDTzt@7S8IJO8C8-oYZl1wXi{ zmz*f4n=ks5sGEM%KKO2dPaGbwVh7~Z<<1kCy{N2GG%V;bQw@d8uZC_^Tq<86Xq;iV zUO-8%LUvARpn=_BDJ=L_@v@WZGHdEdt1O)c5pbi*ZhQ*p_B*jhh zFF)9iWUzY-O@*SQ_)bk@N>G@1YGXkk%=y+f@%e5qjv)(Z2w*%Fs-#GA#rY_PL@xXO z-Z(YQ4T_EdHtK?4#8s&LRo=P#CjTYpMm`z}6kF$@^MNd-2e{(IeZbbQ2$bcZB&fOG z+{BCbDu4G5e%8vhS%d}_${>&cN$Id?3oLN+v@(r(MX0j2)|~Hpq8Hxf{-dt3r_M|X ztad*xR{3wdw+nrWaM6L)pgtoVUP@|AjP!qIVN|LZf@Wc#UOK_uyh!rMd)8tS81{*2 z4MoV-@lzAWw<09LnKA&Swog|_xX^F?&t_b0Qi1&{cSn?ZCsMvfI?&?2ruo=4sr7=b ze$MrQJ#AVJ4tSfvD$ryV)Ge{q8^`Mzd9m8@p~h$ z@4<}lNm9sSW6A17`V^-Hg+?5O4<#OL@>-IP~zqV0(6T@`*h zk5yi=Q1bfsLj+2xc)|2LjgSxhTl%t;Q0jn`UFffsyI-~PLVhRK>kz=XhGHas4v?!v 
zO(z5bzq7sJq9f=;1%>FJ3WFuL%M0OroB{YB4-9DWFLqEYZ(zrT#Rpw|lOO-o9I8^- zILNS$QCL52mO20@GFi{ihP=o*b7-Te4!)SB?5&gpk(w8Ms7|oOxwn5D35yCY{-uw@ z{W5pa7wD}Z&h#4hpIKA8T4(bP*UO!Np5)QS*F<9fE*{!?`SHVEF!Wd@&E1RL!CK$g zE%LXd2ylerwh|0?mY^BV*$f}O##3OoayQ%mkcbnqSc(7zd+001e79_)Yx?Zb)UDZt zd{=VCXIHh#`lv25!LA?IcN(?9SpSN5HC8)!5X0%HEPV$M3^{Qt*PoWU4MZWI#$lFx zFany-iIJjeVsISGODgkACE4^R-$QU6vR8geAT^yi_@NjLN`F(bfKzV=E6>M<^0&gc zUbu94HTGcAet(Jcrgdp#5mA*tGo00>bT~5=w-mxyZiEXbKWVFikudqP zQ~B;CSKpNfd7$s`zb3t{RrA zO?oMb%eQgcc%fv#wAFXyMai|*szh$9%>LK(XXv&05;}oW(R<64QK>; zU%IaLK1}AjbnX^)0X+3VP^??v3L1S31pwLX0Y;<7e-p^x_uvsZUjY}peG{eh6{?L> z$rI#c{hds$qB2UyVSpS1o(0679e|fA4y#I%5o~av7@EA91DNSD0J7=f5HNf@1ajdQ zf8NU64x%A+u{XTV|t^!=P~@9k0_U$kAWl;~~_tKluAw&2OU5)#}2 zz(72JqdIp0(lCL-)Yrl%d6o_%=if}s)g<*PsrEm;X1sE3K-d)V6y8@#+pTx?9ztMbO28kWV$m89 zh-M)x`DWIOF+;}ER{F-)daFNw`ENGprK}(Aq@G=VUdpO`PXe5GZxu)^ zL&jW5Uqpp&2|C?`!W`HG7(tBa65yX!(50XytOhJ8E206~6MAj|DcYPIHcd0hmEJT8Mhed3)Q>PoBw3UrYkg~&LYjc8G#e#<)4 zh_3%#_D_T0tkT1=lg!uN;7v-uyrEyX`KZu^CS2&<<3Mx~RJ3M{EUY{>MA^AdilX8) znR(q-o}&_gYkgK^DX6AX?ynhS!)sBAYIzO-V7vp6nLc6hGcCye5fn*Z`ma>*o&mW! z<-pUm^zjcER5sseT|TdJJw@F?L=p{?0%`k;#xe2zmPBnfu;F=A_Rz(fHov{hR=m*Y z&&7Iq6>^}tCSt&!(7h~iG+7`(L@erokO2SB$*N^7Sn_zCja>6v#zTxm0qcP`@`DXK zW#M48usQ&fw+fk~&-_zi0ZJ`AV`bqdH^n79)40p8y_f>bUfFp*pX>2Wzo5sq;%!YP_9qNg_#6?MBKM6sGUEcPhO^P4Rofa1 z<#_?ASJ!L>Y1?Y&dvmNifBxduR*yQxdz+1WNpE1OHidkuvLg9T-BiwW?Y#v)bN%(SGun{CUEq#9 z{mbhpXhL!DmBV(^L-J@`Ys`Wzi=xcc{;Qtg-9PvyaxbpSP((me*pvZv!*DZ=lTi=vzW~zHhEJ@_P306QZkHhRYBiTR*B4FH_fs> z%H?lGSo2Ve$Loy0&R|O0_UHMqMtF&jZAv-!R{MJ5LZp|aGUXLuo{uEt5jM1$ET7aH#OLtd%<2vpfDV;bTR zRBPoNdrPF+7L8)~D@ro_i@fuSDuS6f*Ya0M1|1pGrF$rP_sd$!e*IhNNego1?(nQHGDX&qz31^fo{q!;&X4s4<_-AzmXvna+A!A8Ld* zIQ_~3cch$+j@eW}F=;<#0QRt-vgOrHE$%_SHwyYz3*gWSl4&)}G+_yd_BdFgB#8&u z@R5zno8-0noQUPDj-x=q;bhEAYl=XeU&fm7z;jnbt`kxp|cO8 zNzSM%wh10U>+>e|#5B1?#@c@lm}1xu-9Qs!{6G_yL9R6gjPie@pnODhfmu~zRoYeX zt;m4?L1TevfnrpaySlg&-0g_`!hpE^e5(wN5|sw&M)mM$gRUcrnXflE zAs54Pe&ERL!vWPRo}r_Qi*c_nsJY$rX74DyOpa+7o_tc4agEc9>Mn;f_AU48XJf8( z*;>Tp!`&O_s5O4`-o#Z`;ZV`zV%|&Fv~YwS6ky?EQPNNlsjoW5fl1MS|Ij!4y_Gf4 z1Z3V~6Y2O_8m`yM?x=Ol3=}v#Y9rZDt~Z*+2Q^quc&nplM@vz(&O2L;q}{8W93ZUi zB!m@E!oxVGPUA(FH25PPyfuz8gI&ycQ(Hw$utvzNK4(vK+43-!|7*^+Gor{Zc{e7Fr?gc`Op1cQXLy& zdUOkyIiX$|)57K6FgU+B@Sx%6r7ITl1aW=s*l*^Xs7p_IfhQgPv4!Q9`hvOy6@EeQ zXuj4TD}PW}-`|>!f=$FoxME_W-JU6jk;9*(1}_woZ7OMDkarEW5^mhbSUE>=gyRxo zU-!;?y?>keICRFwiu(qI9!fcoX&RLijt+nPiv{uOIVSuDL>=rxflDpw@54+(IM4jv z4(v3UC7);h)95W&h(ZWVunt1cb5QVo`#X-p)w<5H`n@_-ed{R}>9?eKsJsWIh!&)0 zTE$YkM2eA5RD>adOD zi$Zq?ZADGg=EBQ>UE~BrgW!DbE<_pbPT*Ij9zh5uxlGv+(e@+xYqS1M4%urVM@BOG z$w)b2vzRpED}tHfOOJ#4TR`GX@^4Hvyajw2lXvd68i zw?Alx0AbIogG6E4y|3eDu}nP(@9at+1{qSrB>T@beud8B7zHd=k$;TH{_#7 zT~Ktz^~}9dFa=7AI5#0WdFMSY@8^lk8CiR5x1u91H-2>NYS4;R6(Mnxd&q?eJi=p3 zyso|lQpIP?Xkgt^2cic1Oaf%-?ld0if*Ga30C1zNfi&(WiPueGD$`9j1^dy{{=5L-4Z< zHUZ(>5Cj@g&!-q%Tx!OXNCkY5O>xlQSnoNT>?XlWEz}qnRRQ#ElflAFa;=YfAR97T zt_FoHA3FG7G7$TIrzJmB(twVOELV~$H*yHW{aM6Z+b17c++PCu`EtcrLPT^e%=5(d zhDO29R4C$}FDh#Fq9L#&T)XQPsY;yUDB_^97p2vE1MwL0Rp`MzI3GCpyNewWxU1pa zjEM%uX^U#;Tu@9RXunvt9*=MhsRi4QNQN52k>ivx^+naVXv^H%9=*L56dh1C8PVo^ z13W`Gg3m@Xok$O|J$i0*oOxKr-9Ye(nKhs@_;%k+BL3+eq1H+e^q{XBH&iiOag=$6 z&|EE1d`sK^8Z?w4FQ=`j-J_anE)y4fAebJ_`-f&d_-KKR;)orao9@{`NU9%>lX#8V zQcrn-2kguEAr*iT6s!_{ns)-XG)n&TlHD`O!Cl5ei4Evlc=NcZ7F1{zuQ#qHGt94K z8DvgzETXPSLfXJ=5v625LcCzK{?K7J1ux+TQf{li~=56yIz1H zxuUy9a`8i@7H*Y=?_^`iADHZ@z`5na{ua&NvJ{2p;_sxl4Lx>Ap2&6dWmX5N+zpEy zd3H*Z{mEii*}irNr6ZVcuyQKbrvPgjb4~+niD*@idt8ha^NyrPO11>8Q%6^hi#S43 za*zV0`%U2mbsSL_>oE^Py*(qoU;d=6bG{USZa)lz>u!GH=R)WY2c8;2*B6To*3($j 
z{LsYPkXYReJvOQToJjp;3W ztuJl9)};_9*v}fhQ&A#B{VpTYXd^TO+!L_?!!J68DCHTECglMf8Lb3CMPNf?wHA3Q z15~0cv^f#$ST|E}kdxQNX@O60zY}_jE4MMJTdS2 zCc_Xg^>?C?WHwD;{LszV_`A4w#sF5Eks>kCw=NRZs9d@6$!@kLha)9gU?erY5foQ6y+7|^Gap9-m416h$N6Wbz? zNa?@)c7C87#W8MAnKSWj0p=u3t)pCkG?`^4?T&6yP~0jZ%bw$bx~D8}v{kSYV-p^! z7G1B4q^+F>&wI^FaW6z;i}<5L_q|Er#soR47@X=;-&Gz*Vb>91OY5yh*>m9gZvy1z zFY>Tu`>=Ub8-|8QW&0=DG0*u7cLwQU17u*1*I37{ie!73udbqYN1~5U ze|n#}Y$LS-OIJ*GBt%qK4!E;g#Z~!bU@nG|%wj@O`T^dd&k&~tV;>rvea*~X$A(eg z{z#4LoWhh)T3^UH^!3o8<=0YIXofbT4)RIv*`Lnu9VRF2f~$hZC`|pvqMda;7iQBUax!uan57m}~3=BzPyPDN~pX;iD$kG9=(}p%Bew_Z*hZ zW|%bIU9M522SMuHaL+l030pqim<;{Kl)jC?SLtEy0~7VUW_&FksBc56ws3t8Ij64? z6{xZPS$>8FL6p_7O*lKyEKk`TXsO3$T72r2KtHzOrI~XvXgSL6Upg7(EFRdzN&Ts$ zPA~vgL+4C8{BnP4{OnzUipjYi%8=0`l#~m(!G1#85{yg?qRC$dDJ>s`5azaJ7#Gxd zm`ojNd6zc&+y|^6g~e>JijWMuNZ}`6H$oy0^B%^82ocxlBM3zdA>2mLVnQ9;NNo`f zC7KVO&b+0qKi^2-06(%g`;*7~VLPo#0-bBVwSt(@e7F&YB0J*#-0&M$hAg%VXs|_+ z_^(0E5z`GgrKB4Nu_1)x>rFZJ}yz?a)iH_D& zOGj5U>wudVniTFoGex)ZZY)xpo)N&kwbhU^L4xL$&eQe+mE$(7AlPpQX5X=(?1WrB zrcK6)B5>oEH)6s20CION(a9W)QVe&*D+ynYt#ehUFv(SY7yj$2--J6X(<`|bLH?xR z?KejVAr>@@ne%g)FrH=^7a6-ClkBUEm%dablyfne>WN+J$udi!YT7fpKY6&eWPgVn zHtd|Mp@D4!C73qfzNTxOZUmQ{Ng##@j?3iUQA!*+aK~0)2Y#)mHF#0! zFx0mO*lQ#te9T%2obpH>BVo}s2`~tl^8_oL(Wbi+me^d#!E_ zv3Zu8@!t953>8W?#&oR+P(a@`Iv6DgEj~dfE~TCMFEy%&^3qnn@J!iuH^V1#n3V}v z*OlM^)$si=Vnv}ndWLu@&1fNs^YbGTeYRNIBW6{63IS~DHKXx;h7{CX811SZ7x&)? z-26k{h8)2#$) zX%g){G!~SHTGIsHH@sf1o}LeiK4cvEe> zZ%m?}tEP-}1r`qw#O&z3@p~L29O?7IN$G<>V=0TVWZPS?Tt4r$3oGi?_t6i#`(vp$JUoV}b{XBN(##c+ry)9Z%ucgdBJ962no=j4bw-?-GI~D^ z5lySZ)*aDO_z%W&_?_}Kz=Zfvn2>d|Q*P&2$3~~z1ivR&$>?DTHR+N3Hy)CUeId8P zDW)MaE(1%uLN7fvjX72!5k&0rVC8Xm8v3_gZrO?UclqddR>f(*ZV_h00^rY2?aa=^ zOJmhsd|O2jGBncXuk*15hvhdtk3fAf_YG*G>Zr*X5>H}a1N^J3RV5Rh z%}?7dwR?|z*rVs|bo7Z4gchH;LAHh~^@?Up@= zXHF@HZSBalN?CMfOlf3dN}+bJSJiVmm&h;oIN50nL95zMv5+-LL$wNzZ&xP zVV!u@)MeEqlyKY~yWXx5XImOo3&o7#-nG^YyYAw7bWR1qP9`~X6KKB|Qr1js=JjP+ z0#OUPxz}8(UcaPjwv-!wR3$6zCgoHxyRV9j3u=qQ{-;;!3>vMeCxTTz#)b$m^RGIE zp+8p1U}A$;jv#QYWDZsC^PBlL z8b=v!qZ;h$a03!%v5~eYBkW^mlYDOs^GeeAi8_b_{^+|AOOk5*uTNErMsh~6RZlMU%7vl@OeWqnwm)*+0qs+JA z*&`9Dn}~qS`*i-H05;K#`tzQqu_CA(UH*QOUm!`nwDd=x$|m^ZO~T7B)otOMEv9K${X=UArO?lYUso$iA#UC02QmKu! zQz|(WqDqzN(G(08*9K)7p#hi-jG(knn_ZMD?FyCN^j@cVo?mp(TQkUs<)9vBlZ~~V z{W#dI0&!r9V6?l{9CVIdY2>+I@+@u^QH>Bq?bBTi&X>^>@}WH;_#l)E1c^h=Eu{EE zIDN&(j*4FmgSl#uTurA!&omH!nl}>x@)FgXjZoJB*|{jUl{yj1ESHcpJv1bZ2#2NL z6t6$Cke!P8##LImSt5X^SG5?4;r$#1dR1n@v1oC_k{(5iyCwHc-f_SsaUoQw*%>qX z1&Y2Qer1AQs#leB`}H5c)Jo+6lf_i~BRR0Oz|)x9l)|Z+cAky26$x~XRaSOEhK?C* zJ=l=ZEe|yOi>&*fNiAYu`vmZ#C6|}jXKzx9XdC_hz5tE%i=kLSCVPpCeu9Xs@wZF; zGCVex^r$ueaH3k7fQZsK#?I811IX;&dqVRaB~TzKdp+qdhA49x>~Nm%>d4U!Bm!5! 
z-VJ0j7%}2P@b!GSg!b@Zz8WiZzfD*bYT?6)E;IOfN?_RIwt7;k0eP^+z?dfm2aYOa zNg;I7D{oe1)GIO#CBcX^Uuw<;l*1Qo5RdqrB;!ev;--jdZt$i6OBL+|AGAuhFG)#e zOU9m;76jBPXizy--cvCVP89Np#Ki2wO^kT?|9Nr?!<^I@z5?wj)8&uF#l)5CbN67RpfG7KY#qq9n%FV5Po~;fBo=uaOi!SJK|G0qrOa#Hj zXr%B%BrRVF6rI2&HO%#V#4IH$wkb_-y6tJ{I}E&eP%oB7mWCUcY?8?HHcys}KtOX8 z3zr0B!C34}g>hKI#pfy8%R!4ZEN8BLRqa$4#l-TB<1dq}=U>TEp<^0cbfR8?Et(*V zvb})ixUPnmxx%R+&zpNo>b1vZC^?=1Tt;S9RiBCX(Qv7=Q~PB^dH>q1lE41|Yx#(t z9;_*84HVmH*Yq!NAbmI%!LAL&vmYfC&2&GRzIhCK-**ObRsJj=_yw;qFoMYI&)n6= z06QNeD`M`fQ&FH4TYi68d>>l_Jjul1GguN9tqB67=5<4z86UYSEx_VG9t5ASxjb1= zw2$3l$#3ygIo3m;)i%t;81-@Ghx!_<_!(O$-IEoxIH4;X{T2+Vs9&q>q1mVIfNg&@yOJ7k zem?>N3MGr8jY`Ili}$a4nLf5owlot-sy7ydck#bhP3-wt@x^cDX}P+n2$>TI7yUN~ zKTMH7W^?z}j}iqJVIPX{Mk;aKbm&OdCqWwtp1X5-_jrs~aBxtdJCjbaObW&3MsU6G zOHxvu5w9}Oz*zLKudc!n0BZ+&UVp0;z>uHjl~X{kV?q@&ahRM5{V^>5Bo(0va)%c# zaU1ykQQR)N@X*iFobvyk?xpw!G7>@dg-I}_H)>I{Dy(foRas22r^iW2UKoL|NZx6a zG&Cv)u_XRc4M!_TFqr0!98DfSj8p}oyPl{Qs74Br#v&}*WwyACLN|`p{G0>Ba`L$J zKun%i`)zYM^9A34!us#|XE+v?Yh$R!N#c9&=C7>Iaq6guN5L<0OkbdLX{{NH*P5J3 zorc8wzqI!>h!ZD9>k`Q0<>BDwni};FD+FO@ksAr3f|*e*hFpj^-{Yoyy2ihgpLLaH z)1hmhS>tQrZ)IJ>jB@3yAFQSNk+d@(la#Z3HQl@w)8WeNR!&;J|2%}m3qHNQ*Kc9eE7N6vJBk)JJ z+U4u7$SVrVr*;0E^HHM+A=Xa5@cL|G30Yj>yxd=pZk%WgA_zX*(iomCpT|Frt5b;#frL-Pf zMC92Qv%9H6`dlH)uMrm|LtYo>Nrx7~Jt3YAR;JpvId;a5Ya-cG{C?bbRl?77Wr*$@ z3eajetGtZ5>XRlL>uOwm)+m&GHHsuJgYq5Yk3xBr-i@bUNB;>lpMhxs+()to7M%lk1>DKm9Lt!v9@B3$7?Oa^keWgI_QutHyTLNp6$t^ycNrH*L zuWMoVcE`0b6N5SC?(c_xrMzs^X=n3nIJIxn;eWRG*W(ozy^z5oByXZ((x_#Qltn#f zy`^;h;uh{HU;Wtz_rx@MnOEYw8vLp*`p&GvILH42KNyh3jhuYvob^>LLc84xIO^OI>>O8_ zTD?{uwipNYPo=GYwsrHw-Np_3>N{Jn$XoQ zAz>ZAtk+=SleK|-9>+44w%4esr&9;p)##hrWc?vLK^iNh>?)y( zh);dO%^ubEgGZ9vcoW#O!=K%G*ff6TjB|iHK{vonO3t_MRu?FJNF*bed)ws1&{h`M z&-(IgrO;@=R&a&I^+nbL$~vrRk7pc)9%kA}+B%~8(^&3OsV)c(lwBaBPnz`RRnEdrm2M`NfVKl`j(bJV~0Bxl-bsvI*ewQPGFu9VEx6jwpA)!5qF`C9|hedUt_?Y5pia!!~Vv)N|Mo_{?^gt)L5Wg3tW+}a|hvp6f&4E zA_L;8>B;orMtTu5nei^kp^k~hVKsyvpdYrGv_(yx@%s6@T$`9kyf1px9N6- z1jC546yth@FNId^<+DC;Cem&XSuXOWp+*YTh2}>fo7@OLJcWJT>L}i1$W2hOEarq$Q%CQ6@)Yn;S)%!JKcGAQx{yK|afrCB>7iAqpb21=tu zlsdrXHV}m0a$e!HT4to>_nd(R>7avj(uxv_LnTE4(bAERXqdHzzzs%-4+5QAo@P2B z5fLK|p4=&wZKe2g>O^$OEG@;&A#CJ(M%YLC1m%O}ifkbLmrL~fbpe&G`jZzly0zKWcL>#L?Ut1XAFUN&t%)w6y79-?$!N)N3L=v0XAUW|mRiM~(EetWXFGbdZpGD0Zu=y^Ot;^x_Gy!4 zZ~JA6FRHRfRPO@_-4|^$e^$N_`B)Y%_5D6fCI-Ks@-kw9!;xyKc+6?DYLG7cpB{nL z8i(7T4CX5e7YAW5&VaZaVt9t^ULYTM=1sUpMO14DY9{K!n`08%amu=;3{@}7+|XeJ zy1R4)?o{XadsjW(A52xT$5sygTqAE0656tySumXvSrII0r#rqA1-j7_$BhFbMJ_3Us2*JQ=IoOv55Nw=-RA%D#o@K~VupMh6za4$T!;F=VdPMLjbibuFau&@$vF$nxDTP;Z zcuo2Bd-tr#Lk!qO8vH_t7uarBL`63`VMKwTevWWRKP%%YV#J5{`6=8GBN%}gVmCJk zw-!=jRdv0QmR6l5I}_ua=)%r!VnnW?IHoIiIdl)H!$*V_2Pn+4Q#;d-b2{ErIiC^f z@1x15ybnn~C%GCWC)tg23&Q$9orpTYxPb>Uqt2AvL5M&9=FZ~!FH_bNMrZ>!U+UIO z;gY^cng8r8s-s>%y>IL(4SqhKZ{5#525GaB=_WkazSsZ@kUzkGVKqy!=Z_nysPzf`W)K$`w003frU#LFc%RU zcXqicp_F+DzPE-pDYZCMU4$BwY#fm`EFtkNMPEs3yM(9OLlq)5noxr(J2m0AnNGGg z+>mK!tW)}9-lxkvZLoYN&~z(z-Lf;7 zLeLyBx5b3P>=T%28tNQI9$70M8-6*_n#;F^JJ2T(t7sRI!TFpK_6fz`uzxL1_~RAk zt`r>-8i@PY5iZm)!eM3u6=S2!7S-7hyh6R+IVhY8JA)4`&&&;PxaW47d>CUkl=}w~ z)8_blX?>V(rs6`=7tzX~_)Vuf z_rE6>h08#)vsbVkcuw?eJVB^pL24g*&5vw3W&|o)$v{D9LK$h<)8;+WJ$%-$CGj{E z(I(ODBV8-XwRo{68llB_Ay{j5sTL!-#CymN-+w!L8Pg%zL#%@KC*G5HOw>-WT@6bl zXDBs$D7c=Yx$H@dMbKyHrwB)7YegM23T`w!hI1oJWsoerp^K!+BFQAE-BIHN;SzHN z;n*Xp{b`?TEOKz^0tsWJod+?8ws`b%xl>fOgh2n z%9=)ht0IiGeS{Ys+?vwZBdp-I#=56Ig%=&+nuvs9tJwQ`(Abc%J9^xBHEdOKhx(ZM zFHyB3Q3w`es9@>v_J&a>E67XGUk@)VQ2a^0(wyEbQQ1o0`kN!s_@?#o`uMiB*=6g} 
zDkwwcPkcyS{AEx{0F`Z3n0pWX^zE*2c^U2e)WLtY4u(6La;PB8{SA~+i-ZbxQ{3n;Zts0*=B4FZ`-s;JWGGC@{?GOfcQ(I`+)Sc z0Iq0MbjjGGAtN{8Y{u1B)6~UH`@b+L1EslYGa`jen_=t&!yMbZ?~-K7$&A`ljoY(E z;>{iWY3+u8mDvoCW?q`-%UX3Jq7wJU$XSt~-uw1H#2B0|MCprEt}KXpcf|9?JF?t! z--fD(<^TJZsKx?oxyH&VFnEq1TPTM!*?A03QJ$k zD+kY60u@)YOlfmqKtfB@CM3(vZPI+yXZvz)fY2;;;RI6~26k&*1O zX_yxnDc~s|dN_c>c8<6bNyJw9Pt`K7R7{M7<|9|#gk&0rxdSh7e^ns7m!V!q3ng4; zWc>9nZU4$k0RPhBsy5Ul>LQeX;_iLa@iL)yF@_@^%>LFI7&;O(DEIOQ?is_vbZt9+?JbyWx}O zPD{>PS!b$d);?QbK0O^dDuy)^>6Zmpl_rKX`j2J5Juj0~;=_N`Ws!%B&*je&7D3C} zY5O*^$u$-o{~k3QQo!jcB)CLPpB9hMF0h3;I_Mh9pB`-6-jPvM7W=0Ps~%4p6K%N0 z5#kktIlHr)3Ks9m-wmH1yUHVf?*5zWWZT3F)^BDkj0rJXtHnTw__Ate<@aTdd@HD= z`JJR-l5-Ur10K=?C49u}ja>6m>Qv-)%7GN7k!SFpaLRD0cRep#(rIj)S{@q0_guxF zy51^sEO%;4H>~KOMCy&N*}RTCB96pdkL{b%voI&p1}xae5GRRFuc(&#qoi+cpors{ z0q_rHkL%E|^+f~IT>KsVSNUDyXcP}fGnp~6NP!JVKNrG5;p+A1H6IIRGa_HTL$fq) zslpII^aa_>CY>E#eg5mp4d7Kfws!Az+2l(D*DABh9@-v_WN zx>9;(6ZZ+@X#89%6Mv|n4KIseDn*s)8P9x{xg+} z3)aH_fxEh!!kCj6iI^5D-yWgkM^TI;0`1-YCV-A@{FaGRPz!OGSno=n7{TEEWWa#{ zJ@Q)AHKP2nXm3JO-8sgjXK{T_gSg7C6<&d$y*D64P=OcmzKSL-%NDiWx^{zGxnc3e z*?Bv!e^Oc%TJkI2Bc3r28oy8^NI@Ve2a}CCY698@+Vt@{TLEpJEvoR-nMI;~rYRAu zgN0a5M>GM>4tHPEvxx$sSPO3kRwx8TLw&VHT#Kgguwu-6Bnti5z#4*W+Bc7SWH1!t zlMp8cZS`#uE2LTwjRLl>JUw;W-t2RnJ%Z8<&&rm(gWIpm{9C~Va-9^k#5$^1v2EkG z%v|S;h+PPBHXoIAwGL}f5!J{%bk)uMY+lZyULK)BRi{!f8ye zBI}myoRx|jfhi6#4@{@W1S>>);5h`|Ak0r*L@Y`JB2L;5o=MY z?7qR$LE!DHEZ=XOuVqoS2D`B^&(F$RM+=gWqAoeFGWI zUc(90(`Zt}=msQNKI#t3qNv0qL6jm^v~$7!LosBI>S!BxhrP>u+6~fN62|sXloY#R zzswl9Ysw*L1MXjjbk-X;3AzWDLGXy(2(}95w`6DTe@SV(5*wku!G*!A!A^Q4E$)(j z8Ao7`;}}F=>N2ayMLd3E?v1^K*lMd8+T|tZ)7*m<+TaMx$qF>TQcSiJPgTSCR2vmv0+w*^Hq{njxHLsz5*#B zeT{1>_r0wAtuO_;9|uRuID7YdzlSeONZdsXGdt~X#!{jePlMnRUMDGq%@#Y4%DF{; zY`8n8>vhl=KFdNwICOi{-qi4^_PHGjJ98vg9t|H`&6t7ZzvUDfvzP#XK! zSUMKx2SXdg6fY9BI4)|A!`r54Jxf|c-Ho@7m6QF zo0ld!+{{SgzB`|(Lha1%SOaho#r4> z6y`Nh;{WpRh4684FdlKm3h=|@X9h!Ce?Mj5nMTlTGRBt?Xxzn`W7d(bwEu_bd{lYLhX(T_)_3lsy! 
zC&tR*C;mfm>c8J~31Jk3A1k;%zPK+9(5DQ7%u%QPdBrUWvG4fZP=byVx+KaV8BkI?AK!*!>`(uxVXt_H;o{) zB~Du~-6YLd>yi&?fn|F9NMvr*o$3MjS#_pgYMABkA*;GAwG5ioBxrwM`S*I&r;(a_ zr8^hZq=)^cT-BE*S0lSYu_cS{gMCdg&$qgN6`RoS1|F$@5_cVwSP|YV@W`S^`z(Hk z(DRD7n8z;u3rPQ|GO4RVQGZ`KI%T@{GxsmEy9BP5Ltpyrp@`>1@lYZS;C}KY-WNAt_iRS=?~#QL#Oj_mXeK=I*POpiA`onPB(tRg&9@Zln|MBiP+)=*6{_4Ec-|c4 z5^b~5jHrB0;2}^^cf%_@8$IfnLgL^(RQ3-^oR+%Hd3DQd9rUj$Z^^ zSSKdU&<|&t5s3)6YiC6fJ>g8BuX=nm@We64O%QMq z<9}}9Dl?cvB?rl!!qv=6m9zQ7;njp<+sXUvP9O1-yK_TRY}3LW`+^TkSVc?0;rjHQ zoscgeh(L6ek9No=nQxbkF@C~X_;#RC@O1~vabHjb%j;$h!NIbBmH4(a`Ps#(jD#5j zNv-b_Z&D4RGfKA~_a>CCSTUtq)Ht&y*j{&HF+8MO%5p472!X)WAm|*CW_y*l{I5T3 zZ@9TY;dk}X2G-i>9B9hK@g&>41GBSR?n|Y$YOKu86OK?P2(YD z{GJysnho zuxJ&kE|gbmK`0dV8(+4iF}VwK18U?p=D{_cI{gSjb2MhZssjbN?Ke;It)FyS?4TDO zFKp;RVRAyXTttgiKfzwBRJ69s$&Ji&JhdPkTUhc({8(?(V2Kt4YP!4e`BOYiV{_K$ zx2o{yxlwJ#1Sv-}%rr^Hh|OQYBiB`YNvz-e zTv+vD%A}npsg(V(@M-$nWGYRC0JL-pcr(pg;JTMT#{>h(Yac6A=j0|_83Ho4NTq%y?83TPO zYzk~R=P?^v-^W~k?-|+dMjd1E5!vzSf}w5Aa%Cj(7mlv+1wt)H8DZ1Mf7jn(b4uIw zp__h(FAqmgNoGnt$gU+)2Xxe6DpE`Cx#8LOmED%*APX@^5}mZ|$@H*^QzYrW(RF}0*khh<%s}0MKTZS~|cHuokLH^Gb+KolDX4dwLO5I5<`Q?zeGM&vb5|=#Fohjv5 zRtu=L$#GygU-4qEwb-Sr^M2fK2H3Wo?1xv!XV2V6PK!#p4JW*G{(T3F5`l$!wVY$8 zzaZUSBP5RVK4M9WV7x)eZ%!n%=1cMzXKRt4s@K4Q)GnWI&ih*vwWwRw{vcVRBmO9U zz!h5_)%<8yGaQJpagAttw+LNB9(T91bVTn{Of#rHn-Mr4X_EL7`%YtLoS#6m_IJ2~ z7pj8E@ii~V7MV`tnnM{v$Z=*Yh4%BJ-92U@FPW-7NnfM%H*Zj?29BAWo9@ z-vM~S`YZYV)b}`6jb8|H?_@y5rgswdNhxhyhMzRx!U(qYV%|TQ@LhWk^u-Qwdw7OMzn*$a)-JFaGBX>ue>J&?q<34CAY7Mo7zzV~UapB52% zc-j~JCvwH&ph=4vIwW9|45suCrV1#^=uYI;!szE$;JYH7>nB^p0OLUMINDQ3D+OkH z1%PJh7FP?a3%wuvWBYDNVa z|F@s={+~N&#}NU-0?*jhIj=|IUbJ;CM-DVyFIL#BQC7-%FJsWCzjjtQ;#H@&NdkvB zTZGyn-pm=H$GaK>gY8ccSc>y|Ge5c1W1g(W3rCN5JaCfEh1nf)Gbc#nI=7yf2)4TB z49sf7z^uB-lD+^h*VG`w8z~eNjjOCzVVv$|tRpiy+ZJj&&aafTp%1WzMZlQv0t?+q z1I!_4kL77X5F|F1)=qL9=g(q=Fi`fWBZo&8A%Xn)z|$r1QRPYSPy%RY9T##86!W+k zMonNRA(lE1$0WI)z(;9fDsC2+6#Zs~zfitJr) zlQs%xmoX2Lc3(c;01MZg7As3+K7(Y|nx*{C(X@#m_bjJIgwAV2Z>ul!L6!yr2m=L+ zfMEJ-qJZvETucod&JGPa6y*ESQzVKYwLF(}-#gS-H?XY_OyRE;{q8vB3+(NA&;#U; z9DMC2CCuZncUE!^g1|jC{=mH5mGqmJ^<*}3JePe9a z)|f=JH_#50Gfj=4SBk~AAs#G?Dfu{*HCu_C#Vq`e>AT!S-~MXk?nqY}&wv5KTM%R+ z9zizXrNcg=5ITmy`R}KW;pBy~z6QuecE!Fe=)NUK^u;};@UpiU4FM76A0EsWtp!PN zheY)?u9S_H0R?i1^g%Zu2CA4bYhH@;GxrHtS{97f=Rd)`n<;qj$r<)hJ@dJH&-6C_ zuEz_vXNT78Sx@Jl?BPK;RtWmae4yJt246NPy|6bvSoI%c!F8z~ndxxlJ%Lm3SwvtE z@zVM2Y%~Cv#duN+I>|Cmhyn|O$M9be9dlYRowWN$ay|+%%V{tu==@64J9^a+u6t+w z!gcq(wI{G+@_fa1$I>&j{em$YpMSlYUg;N1sSLj973Rs~>9)?fjk_sWd!W_r92kch zJplrGt|B$Mm&YYOy6Wg{oT8=2A_wHa?j!D+v+Sw-6%&Rs7N+G%<05qygF|G7_0N^! 
zM(0x&NA?iYrMXuU=h(|nPbZNG7kv5O0&I0kmSTzfyS3T}^$9{0_jB;pOh z&@fK{yvkYMqz&$sX8&!h^^yacQx%PZ?)kwl!TE$Mw{(Ed8mhj}ZX| z#j>8v;tLrd_Zx>rsJ~g&tMeh>3Wx0ElCUt4Zd37VUr?N54|@V;a5z4X;3$2UVbE3Y zrJj9dqtUr$9rm~$jSIFB$z#=sH9bo~j>(bCTD^pCw4{-a@Nnfcil#X=v%a8aHC5jw zN#+^@rX`L&-89`fO$oeF>v&Lh!u3^Ls6;+QGv`)Hj_@MekSpkvMsmY-N&LO*o)VK5 z87+-JvFLpds2W;~fX^$yPMJdPYA9xZ8~kfg%y=LEtH0Xgckm%z6@z3xe9 z#klN^U{=-rp3UnPmKTo9a-5SrdWdjj^WG6Lb03C?VygNTZPIci2!=~#kZKyHlhxdP z-@rebXw^0n^UJGt7}_p*hD{DQYZzPY=f@~m5fyz7WQQ3^55rQk_Iq4tuuDFP0b5>m z)8~6-e?(|+x%dZ*-aY|6VL?TgPrY>ful!!=HAVox>7e@wa zVL*4E0@Ge8+y<8Qg<5qOPQI4FkQrHq%{BX7vq)K#p-@KG(#9%1ViV+VHx^D$eq1>r4%YqvT^u_j}XDUu^zdeR;v zH3FT&&bgr;<6W|ID8`1|8J1`Q)g0%Ka@q*ce#b5;Rvg=jEy5UUISL|ag9?;0!d|6E zH3>PUI?DqqZ~Dgm4*VB_iwUBOM;dpt+GFJ(=@WW!RMLJ{z9%0RF?t zC@@ZV+uJW|;A}cVN0g4VoU_wip8&ytMYyY5x>eZkHh&rxup4G}$7=0f72>rgCWs0; zSDAH({|!ohrFD)Sl=GqZ9GlGFGDzO)L+8q3b4a5EU0U>Yk#+AcJ26+w4}o$qQyzn< zk;)^twQWmCVbGau7C$p<-=Aw1cVN@>4ZJ}VD|D~0B!upK-wzHELUa4Qtio&n9o(*{ z7kN16{tz&48z{Cv)Q>RI^?156_mQgHV|N2BfHqof6D7Zr&bRn0;xglOi9iriY59y` z%n>bK$SRxrNL>QtkFbtgHv@kl1>rhEenbf#1%lcDof%L{&mSrL=v+hwb^+l#q9B1l zV6LbS30haI=v_nQsKOBx&eT?*(Xx79Y_1-XK7sQ{hL{;t^xV0_b~k)bq2(7VEk2mh za2_C>j%xc-3OE`a?UW)!#<$c(G#W-z^zTHUL3+iYX?-qeAZ%)b`cI*I)`cGXq{90q z1y~?zuW|2&Iea&(kIvx=S5$W>C%EhA7qRY6v`9!$5I+(a8l<+_*>I(_LZF?p={n^a zjLrYK8lhi}IQ>G~BYta~YxE%*)OsGL1%)D0_+#GGPA7{crBee7{fL>MwAomsguBAL zc&5Ft!0^A{G!Tc;{2i2}{o%F4Y$P{?dxkvYTJK{qhn699QZ%fX*L}>u(yadt+P-@g z{Yl$c%13m(EVjeZPg1;HN3j-nr)uh!j8_;lGriY>n1Sm65Im>wKxz3UwcA)0W0>p- zK}?FY_~vG$+%M0gxV0z7J6;Wg!K`mJR$*Mx$VR49yesC1v%c~E^ljX{I3*%zwysqJ zfW?9YOz!T`Y&;s5fkAU(hsK{e9LCuC@q(U*q7UCQ3fRdBq@HUbLh@e58fzjnMeCrr z1mJ0=>vl;L^;8f}J@#XG`!%PCPD$vt%sh8wKrGqcl)J+K=SzSBRj!I$e0=K*g)FAU z1>Y6@EbSA{c3eJTau_cALwfQMSkV=i0B!bh9G-c?v-`S#rEKt{Ot4B)pXQquHnm%8 z4OI8=;$>p(FjN1ODRx!g7;U_z@dajWGm8xQ6Bt2MNOR0k&Ygf)VdbI-JU(En(Jl#-ZD(vQw&;ki&P;tXaWGQp6dNRefbWNrkYLM!KLgb^YzbkW$ucu(pPEf<-NwtfZ?K=X`zLD&vxv% zkXdW{6W?hAlsWftL6^XLZ+erML1skdHZ2MrK7j z+m19B9kJ3f>;yO>0~LSbi2wE^fYT^7py^^)tQQuX1x3{$AWJ58Mq-^R1D{RrFHlA5 zZXKDrW)0J)1rEE`Ao8^S+6^yMU`eFF%1IRw__}n5aBtP39u!R<=G5F)c`+F*Lqi-j z3|8sc@*SZsr!9Xh37klr-}&Rl-H?D0t)ZnOosSQN82+*db^XXt+vOOWAzdpjUFDzP zyS>5_yw?D?7Hw*5rpF7j{;-_qVbQv4K zyAgU}Gk=qM!LipK`_nZwyqa_T`Ms)t0Y&=x%*u_!2JW*4%xHN(ZvVGuFD4q$Emx=H zp^@1IEqHii%gNzt1@MDJwIaStDZ5=qAw~$oCIQcGxw+{h3x81(LjER;rki*ls#$1d;y}yQUQ|CnaZH~_JxIsY-;$yDjQ$X@Yami@L@a2d_ z=*|*_`OE)A3JSGIM9710NV&tjI0Jnr5auBKzL{b{RsdMT%GFBLwGo%1-|6Vckb7G4 z74tw|j-SL=Fd0PtmNX}UX1-lxVK{Lut7yx(<6Zn9?zirn1;g1v64S42yG?SG43n&* zhM#9&u1-(Uh;^?@YZY{R7*MPDRaI3j4?iHtN&K6lS=EEdEyuDWGU)eScihxrNyWsV zkO)h-Fth!cWmzOLp?DEP(V-WU`3LF82A+USC+*aAZ@s7NkeZrm<#o4NAexN=@ZB=O z*bH9m@hWXq35l@qBmTD)*{;0s6|n1nHD;K@xV-Q6x5|p*qJR};-t8J%fNOz2mqUCw zmdp40FyB`VrVPh-z%icf{dSOX_%X*0kaB>;o!NlD-xOO4jYeDUo=nd6d}D2TuzlVW z&zQ#LF|JFjhwAJV&@ z7|8`3zfNi+4<|$s(IPKY>ue!L|MPJ(8U_+n8jg>~*HGfC+H2-xg#`Rt&H!XO^3R&b zMm|~QACtsRTHD%qxN_J7sf>(rJ|MV>2=0G%U6q@A8_U`xw&OK|ydH}kJoA!ZF`I#_ zpE)v7^&2Bu6dOUUy&-IcSACs9mM_KFf*bX=JteR+05@WUt-T6MB{jIYqQC%QHiU2YG zp{FQy;y`DNvv^EoEonpnF#(j*pZ5YLzF>tZ-ob`}@=I$+l^Q8-R3o zcD1Vtq#79jfS0j?9d%(;y{V+b3uCmgPYQ^og{P^THsMt65i;@AR2)FUfNO`U(2OjN zA8!`FLl%xs+ZB+Fky;46O;?_;I#*iC-|x4M&_%a=xxw{651s|C)#{}HX9suZAYXT7 zKmdRgP6#iS?{(Ak(p$QHsZgUExTp~^*B=?TPl#2{9>&A&?9K~YwAi6)1IP5*!duY zB`=9Hr<8L+lUm$2y|0c}-*TZycma>l*cX#JZ6CJFK8%jn`5B4uZtcZD{SGtyMyEFX zbc^9>V^#Yz&Nb5O3(qO7Nk)k3x{9UwFB>UIr*q8g z)w_;ffrslmGxApg(77mmvpmzCQfn zCCHH&8e5Eq-Mx=G{HyDQ=LV878Vg^P=bvsr6*3+rOjkAiu^5!c&&M(DY9Zqb-tiIf z5!t8$=|}q9Qf%-ViR;98UxpW&KTPCy1z2+W^`+k-TRB9vdE7k%Ay^FLLhkT(Cz~HB 
zqoZIFa}WR&IyV5QvSnNExoo&OmY#^9d*O8G29X-F&5OI4AB_po2LiPAT1?l=<4&GnFOtMaGP&gq%VYQFM%Hj+9{0<;?3V~oP7aQnA!wp`Y zn)`;I2T2C8gczRb=B+QQUFgV}J#BnGgH4VM(!z_Sh)NBK2mbLoA35^=o`;KeoV`b? z8G!IQ;|v7p7`y!}xCJeu6TE8i!c^D!UCeAX*^RUQ<{$9fRAju`U~{a152^Xr4yeol zlEEqa(oPE#BhL(|I5ztX4ryqDLqCvb5S|bel9dxhSOzXaAwkT_LekN=z*5cyZ$1kG zw;80>#W3(OQFwd?s(qX{LVyy9o2$w!$!clEP!)EIegr4Qxsq9>zkbwHI?-s}>t5`k zpR{5u`VkVNc~gbs`0#{th9~l$=_}n#_tsv9aXR_vaLd4m#zDX7T>^Y52_R7ptxFV$ zo0~s-fed?Z^MJIq)%{RRW+{MXbvpNhhcrx-Cm?SV^zm8xZH|p+(V#W!9FVqQNa`Og zJPA+<8=UYgrpbO=H&4Bfa^sIw`bMcMK&Pc)8 z{RRynGrJbu8HhZ-d&2)`yUz!NxlLcR1AMo{o09*APi*wLcj6z3{5J<_?Z0WNFF}cR_iMcA`roRWb@#d{94i{0sn_HdLldmzTa!HS z$rj(tYl86K^d?{Q17SP@KqS%%G^u&heD*ioA34qubFWmLTy2J>hJ81(^S`Hb#CYD# z17c5Te+o~>N4j6$VKUdyWFyM#Z|S19tFFnwH>9@Mdrq-&C$pmTO?erfb42SAW@^fI zeeciaH#Y{>_uLBIxTXH<#^OAO?ja+vzcmR&A)0npb}T<9DRM6T@oLQaXY%#9V{@bgjOWzkKR+KkJRNBZ@r%q)+*~ryTo5j_?GKk?qIESOn`4b1oXl!$U8&C zkZzUBc7_24xLmDlsW&}$E|Q>4v)eUOdpxhHsyrD`jVXsEHOEq% zNq=5;C^d?rjK+aimU5LK5sucNljTbQj4)QFtY1_Uk>D@BV8o~a(c~t!gkXq!<@ggw zp}ONzq3Ztj*hIZ06{lNv$te@thgV;zKiG5`X+h>^t$)15D4}UBs)WWC4K{)32GWN8;Q9zVS=M zEL(d;WN8&lI(hNmA1>?P%?S5Q*h z`nj0MQORB`wduGMD}vXGzTl0iD0V@%>|1e z=S9^8*6YS8Y_L;J9VYR2Vcis>QpLE(SN0$Oevd8L??$IUf3C`}`?J%k<2zSZQ`{%T zRE=nlvy?IuHv;!h=zQMn!w+j^^?#dATnP0`Ev+-Xd{?}+^L=lH)uMtX68feuF{)V_ z`KS~qMQAQ6tOoKf(Jf-wQ2O6(wHoXdl7?bX+r4CxvSkMt>$loER`3eA>U0!Q+*CH) z1b&i`VXQ0XvXK%+ueclfO=eGLKfJtP(4ajSQjIV5|L<0S@mf)3J9iq|C#W5(_Gq7KJ@5i`Y!qm3$NRLZ9zr++Iz4ZtejS?EM8Ke1u*GYf$RTCn*uGo4ThqBazQQ(!e)ky_Zxff%rsXcGYMvIobg@ z)m}~q&T9C^-$T+qaLt)&!aHq?Dhzh%<{gp`F%s2gleG43!$2DA{>GwZGk&^AY z>Ra8M&pP8^L-CJ4d?649Uw%B6;s?4!Cz)cl=qYEf6TyJLit@+_g}*x&Z=OhNJYQCy5fMT zSeLw$<$QckVk65;v+&%i&e}u+zQh8En)Uc-&cD-%{^2XjC{Sfvfa|ZgNPhonpleq) zH9uxRnlLp#M}f~cQzgIOkcHKtYjVoT$X3@G%>QFX4_%UnJt%qP%)`Gf1|*eY~CO z3?^40GgLMev~u(KOd$gGRR}DQEfXKW=B#IH#!4V`wkL8>vr3Cl7Y&k4l!#CU<62_W zA(I8epgW$*{iZSAqyun`_j(5}kf9h@i{9bCL7O2>f&sE}ZNW8;T>E`D-BC06TMl6Gi zwW|C$aW5sRLnKRr4I4&&A|pMA$zYLQxme&v^L@3bl>w?39e6fZ0M0%z6L%wMY>@S~ z%H-`|dtoNVqHG{DP;I=iH&d`?R2w!Nq2 z%;{kSg4K?!xr_-7~nQ^U(0V+rRwv4LroD)!X5qw6E(vW@x0> z^?A>qY&~?=LAmbI_tEmVeCNhG52WP6zM$DWu9X^Kg4I37J+9zZc+_?BhwTSCu`R^N zyyf-SR{Z8vaQqDlnJ@~q82+m+Rk0mlCFU{_%~%I80E_;gx}Pgwdg?Cn#``S9O9JIa ztUfG6s1v|(5E2^x%$VCA*tIENBtW=tS9M^+e!>3LvZU>}fg_%)`rK$SPnpxcrJt+A z>Y|l-*+p>*dnKtGbx6Q?4vlx@`3RBf&1>3)Z%bO?2V~X3hssb5x6vjA@E+n>6Awt< z<748U5AKpa_a%^?_hWXDY4Y=z-;ySe4sUOK+C(Ft(rkPrP#9#~5W`WhC&Rtt!$ES( z>o{|+*)n!{s7pFX6W>;hRd*7Udnqvyh5$lMB(co{9$uzR?Ylq?gz64>$CA2%-_G^W z0p`|n7Y>at5FJx4c zd=NHJX1s}JrF{>i;E)URk6JMfEX$%Wd^yT5J+hwVCV&RYt6QlaFg|}*^<&i~+Gyhu zd-`IsycaeHb896urehrt9nX*wn3`H!(%YYcUc?*S zACUV|^23NcM_N`H{KXr%#(^#ByYcNU%;mYJ!*A&p~qDljU`7ydQjJKgG@~ zM2xYL+X63oU<6jg{MizDESuf6IQhfeG~$&-@}2JnpCj;{$lCmv`n(cKY%mfy(u+!C z)P~`x{mPp-p}4j65*(R#VMP>xdPZ81V&rUg-+QSxv+DG~Fr6)^YlQebuFce2fm)Ck zjv%)$w)6GQ3^x}Mic0lm=FuJQVNnncE7ty6Oq4KEDns-3+Mb}}*&}AgS}OptqJt#L zwK$b-_E9wJt?qAbQ6OQL62|bwvD*Wm7j%ddp%h8OxpfbiBUBMFR6pCxZ#fd^#OOdtMD%!UgCRanH2qxh$<6a^D6t9c z<%h0rFU7r=g~2%>k(In9x;7Jgs5LBz9ULqcn!;nnR+rjYfE_(_=HY#I<+-*MfbhHryxZFw)v*n$d7 zlWj9^3P;j0dzV#42|crR9b{x=yI(`Gr+huz>8+P7X$w(cd`ns{(<~y-4#zb?KDVQJ zF$#8{T!vg>e4z;vEa z^9-2soER$!B=yYDsNjV3UeeTmU)mD9FL8XTz7-cE@y~COV|tt~nQ?ej4Xk?%tXHfl z%5_K>|0&g#`q8Jk7p6Qr7VA0b)IzY+1Pi$tX4Ek-ThX^TUZNTM=oRjP{XU__i%zH{ zGj!O?%gb)IP}=Tbrl8=Sh@@h>`mjWb0G~_!^_AR{gz>f5-&_Aj^83odO9HI!9nm-8 z{eH$7)jxqO`uV-dY;AS*L?63!-%=w3dJH)|=^YN+nt1`nH*YjKj9S#VZn3k~V_rf1 zceM&5bb}7li1GO*oUOlDWAWT5_rObGfdX=;2w{t~EJiuh*}BFEnH_VqV;$ zH=xCk`%iku&302YK6ExD0(N+KSil?gN5iyx^p8z$Cbu_|gi}lfrl&7sycq>s+7$8iGV%+N>%hVJw|Ax~kK9l4W8 
zXamun>CYyy0epNPe39kV$nLT!~US2v2{NhJ6cShN5XZ zthN1IeRqv`|8=9Ju>i(U9)Xo!s`j?f?RdfN@6&x2utkD?ao;gOXM)RU@Zb2g7C9R; zN2DimKb!DwD%Gg~nPZ-!R!VyMaRSkSeqd3Iyy7aY#cLH8e= zRVT2o=2_#KfItXokLZY}GiB&gm{s~0FYU!KYE6?+jLwigC-5q#`N3eR<2+&7BgP^) zTSWZr*x1oz8JnOoHQH2$KpEcbJ7sT+VFC{ao!$4Z}fX~ufKkf-&(mAWQLA%GAI`7 zGd-FA`}=&;ffmBR$H%vYG;sWEvM$?@Wn?0;?KZVeI=+s(eyu@{|L4+x_J09w2a@;| zEfi(0kn$4?FM+VnVXv(ZJ6_0?Je=C{3bFbIz+__T=hf@R* zC=sq?>2(SS;cYl)O7os@pnGoBs#R+4+_|b>zkW$y0wh`pD_X^1zlr#UcpzROs(1y& zE|qR(jweERsX6<&e*L;yzI?eZNNV5`hJswT-Lw)E5F)yWw3B9;5PKDT1?RBrcg~zS znjJcHNLm4j^ZNOgoNsIh%WoBxD-M3)D}EpGzrUuYM(-53W5*72SuR`;B;ppz^?X9> zKmj2dW(?ny=3U_k98Z}tMT^Mn*|RAi>?2|NfM|dyfan0rawmv=s-qR4fDla=5_luO z752q1UI+;0N6bpUG+68-Y<)m5FwX(;aBLCcR^OM$TR7?|AVl+uNGEB2t(;Qv*nfT2 ztXXR2%$aJ!gbBPrL}+m2$Pu-B_inXo*Dh6C8`+)%D-nMrHt8X+5Cw#Ic|y`pnqLaB z{8x4C*ij2fPEL**H*Oq%@v@J=_Hq39aaCMgtcByzqet#jSdRM>u|?N7x+x&Us}qvJ z()?WbO&oXX)Jc0I)2B~Y6DLlj!D1hQ=>s0eo;`cCcszUCWFIZn$AJR})ZV>&RY^&Sdi?mYdwQ$z7h-$<$+aho=6w1_}Vz*u#*XcXB;bM%3w%@7O_;_>X+Gxw=K34cZ5pnwp;77}bU z@&n;=_Z&Qtf`S6oty?!5MfUO1`nY@duFB8P*Pe%gKM3CuTT{W&NdY0kBP4yL`G)X& z!hXYt57*0nr%jut1`i%gqsTs*t&a;AE~tI`_UUE0*j8463Gv?v|1379ildPNLWEaH zP?fP*I10zTd-v8J37DOoO{2&@8r?@}X{q)$z#BJixTjAEbHuh(bM#R_h{O?+9BEbw zbDd)~dGh24QEQ7vk$uE^&9!US)WL%XBQ$KtPT_lUT+Go%0U;7sNQOzXQaK5Uz<>b* z)TBw1bQNGM0mg2LG`#G?1t!KJ7KaZXR)-E9Qdh2AF`q^P;QL}r&T@27K#1fK5>#bG z^ zZB+fHR*O}AIeCpT&efrSwCSV_!1Yf&$tu8lm^X5(SX_QmnBrFko z@r;DcDkzvY`Le2C*)`989!rGN($X{s4jib7bDcYPPHF)`IO@`+OX~9F%et+stklF( zTMukk^(&@6;T#4*0U;7qOnOLzZ1~~AEMcZ2F8>FJ2QuU#Bq*~2A{5rQZ{LKgDvT<& zRaI3w^n_9&AVdNg-wz%<@OsN9!gInhA*vUim9c!AcNakcA(By4UYBNw!wiRSc)hl5 z+qP=-=+UZY&z^dvSC1Y&G`n~2uIE-hO{|c*b?cTodGe%s z`t+&apExQ+DfDvTc~Hjfr@U(j3J5{?iwb7iv9}4tWuVg{6gEEE`ye35QGo~J5-Y&k zw{Ner>(RDr*G|6MHCaL?TLOr)@99 z>rxE16}EPW{eM5_{7DRwe+!>GM69XKVXY$!HNr>22TqIhZY+y}+W`-vTE^NfemevO zgdhllNP2vpmw+G$f}ns91VIoK5P~2Gf&xMi1VK Date: Thu, 24 Nov 2022 15:41:43 +0100 Subject: [PATCH 103/202] celaction rename variant to current --- openpype/settings/defaults/system_settings/applications.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 7f375a0a20..aa1d7387d0 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1268,7 +1268,7 @@ "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" }, "variants": { - "local": { + "current": { "enabled": true, "variant_label": "Local", "use_python_2": false, From 0f1995e2972e526bf3f3f2c8e6c53363bd040264 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 15:54:37 +0100 Subject: [PATCH 104/202] celaction current variant settings --- openpype/settings/defaults/system_settings/applications.json | 2 +- .../schemas/system_schema/host_settings/schema_celaction.json | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index aa1d7387d0..77b91037c9 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1270,7 +1270,7 @@ "variants": { "current": { "enabled": true, - "variant_label": "Local", + "variant_label": "Current", "use_python_2": false, "executables": { "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], diff 
--git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json index 82be15c3b0..b104e3bb82 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json @@ -28,8 +28,8 @@ "name": "template_host_variant", "template_data": [ { - "app_variant_label": "Local", - "app_variant": "local" + "app_variant_label": "Current", + "app_variant": "current" } ] } From 08691e257d460ce537a62680d0e3eadf212858e5 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 16:13:06 +0100 Subject: [PATCH 105/202] celaction: ext added to anatomy data --- .../hosts/celaction/plugins/publish/collect_render_path.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index 9cbb0e4880..ec89fc2e35 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -21,7 +21,8 @@ class CollectRenderPath(pyblish.api.InstancePlugin): padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ "frame": f"%0{padding}d", - "representation": "png" + "representation": "png", + "ext": "png" }) anatomy_filled = anatomy.format(anatomy_data) From 72840c2805460aeb469388ef02b223b2ca98617f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 16:13:36 +0100 Subject: [PATCH 106/202] do not validate existence of maketx path after calling 'get_oiio_tools_path' --- openpype/hosts/maya/plugins/publish/extract_look.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 403b4ee6bc..df07a674dc 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -90,7 +90,7 @@ def maketx(source, destination, args, logger): maketx_path = get_oiio_tools_path("maketx") - if not os.path.exists(maketx_path): + if not maketx_path: print( "OIIO tool not found in {}".format(maketx_path)) raise AssertionError("OIIO tool not found") From 0167886c1396cbdd76ddae583e68217b9f165515 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 16:21:40 +0100 Subject: [PATCH 107/202] celaction: removing resolution from cli --- openpype/hosts/celaction/api/cli.py | 6 ------ openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 +--- .../plugins/publish/collect_celaction_instances.py | 9 ++++----- 3 files changed, 5 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index e00a50cbec..1214898e3b 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -35,12 +35,6 @@ def cli(): parser.add_argument("--frameEnd", help=("End of frame range")) - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index b14fb12797..e4a3bee5ee 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ 
b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -50,9 +50,7 @@ class CelactionPrelaunchHook(PreLaunchHook): "--currentFile *SCENE*", "--chunk *CHUNK*", "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*", + "--frameEnd *END*" ] winreg.SetValueEx( diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index 1d2d9da1af..b5f99a1416 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -52,8 +52,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "subset": subset, "label": scene_file, "family": family, - "families": [family, "ftrack"], - "representations": list() + "families": [], + "representations": [] }) # adding basic script data @@ -72,7 +72,6 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): self.log.info('Publishing Celaction workfile') # render instance - family = "render.farm" subset = f"render{task}Main" instance = context.create_instance(name=subset) # getting instance state @@ -81,8 +80,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): # add assetEntity data into instance instance.data.update({ "label": "{} - farm".format(subset), - "family": family, - "families": [family], + "family": "render.farm", + "families": [], "subset": subset }) From 31babaac5fa7c33126dad277d4e28b4ff5aef184 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:07:39 +0100 Subject: [PATCH 108/202] change how extensions are checked when finding executable --- openpype/lib/vendor_bin_utils.py | 58 +++++++++++++++++++------------- 1 file changed, 34 insertions(+), 24 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 099f9a34ba..91ba94c60e 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -70,24 +70,21 @@ def find_executable(executable): low_platform = platform.system().lower() _, ext = os.path.splitext(executable) - # Prepare variants for which it will be looked - variants = [executable] - # Add other extension variants only if passed executable does not have one - if not ext: - if low_platform == "windows": - exts = [".exe", ".ps1", ".bat"] - for ext in os.getenv("PATHEXT", "").split(os.pathsep): - ext = ext.lower() - if ext and ext not in exts: - exts.append(ext) - else: - exts = [".sh"] + # Prepare extensions to check + exts = set() + if ext: + exts.add(ext.lower()) - for ext in exts: - variant = executable + ext - if is_file_executable(variant): - return variant - variants.append(variant) + else: + # Add other possible extension variants only if passed executable + # does not have any + if low_platform == "windows": + exts |= {".exe", ".ps1", ".bat"} + for ext in os.getenv("PATHEXT", "").split(os.pathsep): + exts.add(ext.lower()) + + else: + exts |= {".sh"} # Get paths where to look for executable path_str = os.environ.get("PATH", None) @@ -97,13 +94,26 @@ def find_executable(executable): elif hasattr(os, "defpath"): path_str = os.defpath - if path_str: - paths = path_str.split(os.pathsep) - for path in paths: - for variant in variants: - filepath = os.path.abspath(os.path.join(path, variant)) - if is_file_executable(filepath): - return filepath + if not path_str: + return None + + paths = path_str.split(os.pathsep) + for path in paths: + if not os.path.isdir(path): + continue + for filename in 
os.listdir(path): + filepath = os.path.abspath(os.path.join(path, filename)) + # Filename matches executable exactly + if filename == executable and is_file_executable(filepath): + return filepath + + basename, ext = os.path.splitext(filename) + if ( + basename == executable + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath return None From 3ca4c04a158b99e77d6f18b171ababd91d02eae0 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:08:45 +0100 Subject: [PATCH 109/202] added ability to fill only extension when is missing --- openpype/lib/vendor_bin_utils.py | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 91ba94c60e..16e2c197f9 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -60,9 +60,10 @@ def find_executable(executable): path to file. Returns: - str: Full path to executable with extension (is file). - None: When the executable was not found. + Union[str, None]: Full path to executable with extension which was + found otherwise None. """ + # Skip if passed path is file if is_file_executable(executable): return executable @@ -86,6 +87,21 @@ def find_executable(executable): else: exts |= {".sh"} + # Executable is a path but there may be missing extension + # - this can happen primarily on windows where + # e.g. "ffmpeg" should be "ffmpeg.exe" + exe_dir, exe_filename = os.path.split(executable) + if exe_dir and os.path.isdir(exe_dir): + for filename in os.listdir(exe_dir): + filepath = os.path.join(exe_dir, filename) + basename, ext = os.path.splitext(filename) + if ( + basename == exe_filename + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath + # Get paths where to look for executable path_str = os.environ.get("PATH", None) if path_str is None: @@ -114,6 +130,7 @@ def find_executable(executable): and is_file_executable(filepath) ): return filepath + return None From 453cada172b5962921af9d3dc61c64b0b379d277 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 24 Nov 2022 17:09:16 +0100 Subject: [PATCH 110/202] change how oiio tools executables are found --- openpype/lib/vendor_bin_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 16e2c197f9..b6797dbba0 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -299,8 +299,8 @@ def get_oiio_tools_path(tool="oiiotool"): oiio_dir = get_vendor_bin_path("oiio") if platform.system().lower() == "linux": oiio_dir = os.path.join(oiio_dir, "bin") - default_path = os.path.join(oiio_dir, tool) - if _oiio_executable_validation(default_path): + default_path = find_executable(os.path.join(oiio_dir, tool)) + if default_path and _oiio_executable_validation(default_path): tool_executable_path = default_path # Look to PATH for the tool From 6925a96ee64a57ee928b39d951a85a44296469ba Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:09:48 +0100 Subject: [PATCH 111/202] celaction: return back resolution override --- openpype/hosts/celaction/api/cli.py | 6 ++++++ openpype/hosts/celaction/hooks/pre_celaction_setup.py | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 1214898e3b..e00a50cbec 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ 
-35,6 +35,12 @@ def cli(): parser.add_argument("--frameEnd", help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index e4a3bee5ee..81f77c1654 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -50,7 +50,9 @@ class CelactionPrelaunchHook(PreLaunchHook): "--currentFile *SCENE*", "--chunk *CHUNK*", "--frameStart *START*", - "--frameEnd *END*" + "--frameEnd *END*", + "--resolutionWidth *X*", + "--resolutionHeight *Y*" ] winreg.SetValueEx( From a17f516597a39402e8032bc66b5027f47b199086 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:10:07 +0100 Subject: [PATCH 112/202] celaction: deadline submitter to modules --- .../plugins/publish/submit_celaction_deadline.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) rename openpype/{hosts/celaction => modules/deadline}/plugins/publish/submit_celaction_deadline.py (95%) diff --git a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py similarity index 95% rename from openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py rename to openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea109e9445..8a3160e83d 100644 --- a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -37,13 +37,12 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): instance.data["toBeRenderedOn"] = "deadline" context = instance.context - deadline_url = ( - context.data["system_settings"] - ["modules"] - ["deadline"] - ["DEADLINE_REST_URL"] - ) - assert deadline_url, "Requires DEADLINE_REST_URL" + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + # if custom one is set in instance, use that + if instance.data.get("deadlineUrl"): + deadline_url = instance.data.get("deadlineUrl") + assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) self._comment = context.data.get("comment", "") From 66bbaf6fccce75e879f29729443531786693efab Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 24 Nov 2022 17:13:21 +0100 Subject: [PATCH 113/202] celaction: project width and height to hook --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 81f77c1654..cde3a0c723 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -19,6 +19,10 @@ class CelactionPrelaunchHook(PreLaunchHook): platforms = ["windows"] def execute(self): + project_doc = self.data["project_doc"] + width = project_doc["data"]["resolutionWidth"] + height = project_doc["data"]["resolutionHeight"] + # Add workfile path to launch arguments workfile_path = self.workfile_path() if workfile_path: @@ -70,8 +74,8 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) 
winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) - winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) + winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width) + winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height) # making sure message dialogs don't appear when overwriting path_overwrite_scene = "\\".join([ From d7e6b030fe0ac0323b332b14b2c9bb39e839312b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 25 Nov 2022 11:55:21 +0100 Subject: [PATCH 114/202] refactore extract hierarchy plugin --- .../publish/extract_hierarchy_avalon.py | 369 +++++++++++------- 1 file changed, 223 insertions(+), 146 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 6b4e5f48c5..a9c0593f9f 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,9 +1,8 @@ +import collections from copy import deepcopy import pyblish.api from openpype.client import ( - get_project, - get_asset_by_id, - get_asset_by_name, + get_assets, get_archived_assets ) from openpype.pipeline import legacy_io @@ -17,7 +16,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): families = ["clip", "shot"] def process(self, context): - # processing starts here if "hierarchyContext" not in context.data: self.log.info("skipping IntegrateHierarchyToAvalon") return @@ -25,161 +23,240 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if not legacy_io.Session: legacy_io.install() - project_name = legacy_io.active_project() hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.project = None - self.import_to_avalon(context, project_name, hierarchy_context) + project_name = context.data["projectName"] + asset_names = self.extract_asset_names(hierarchy_context) - def import_to_avalon( + asset_docs_by_name = {} + for asset_doc in get_assets(project_name, asset_names=asset_names): + name = asset_doc["name"] + asset_docs_by_name[name] = asset_doc + + archived_asset_docs_by_name = collections.defaultdict(list) + for asset_doc in get_archived_assets( + project_name, asset_names=asset_names + ): + name = asset_doc["name"] + archived_asset_docs_by_name[name].append(asset_doc) + + project_doc = None + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data, None)) + + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, entity_data, parent = item + + entity_type = entity_data["entity_type"] + if entity_type.lower() == "project": + new_parent = project_doc = self.sync_project( + context, + entity_data + ) + + else: + new_parent = self.sync_asset( + context, + name, + entity_data, + parent, + project_doc, + asset_docs_by_name, + archived_asset_docs_by_name + ) + + children = entity_data.get("childs") + if not children: + continue + + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data, new_parent)) + + def extract_asset_names(self, hierarchy_context): + """Extract all possible asset names from hierarchy context. + + Args: + hierarchy_context (Dict[str, Any]): Nested hierarchy structure. + + Returns: + Set[str]: All asset names from the hierarchy structure. 
+ """ + + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data)) + + asset_names = set() + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, data = item + if data["entity_type"].lower() != "project": + asset_names.add(name) + + children = data.get("childs") + if children: + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data)) + return asset_names + + def sync_project(self, context, entity_data): + project_doc = context.data["projectEntity"] + + if "data" not in project_doc: + project_doc["data"] = {} + current_data = project_doc["data"] + + changes = {} + entity_type = entity_data["entity_type"] + if current_data.get("entityType") != entity_type: + changes["entityType"] = entity_type + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + if key not in current_data or current_data[key] != value: + update_key = "data.{}".format(key) + changes[update_key] = value + current_data[key] = value + + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": project_doc["_id"]}, + {"$set": changes} + ) + return project_doc + + def sync_asset( self, context, - project_name, - input_data, - parent=None, + asset_name, + entity_data, + parent, + project, + asset_docs_by_name, + archived_asset_docs_by_name ): - for name in input_data: - self.log.info("input_data[name]: {}".format(input_data[name])) - entity_data = input_data[name] - entity_type = entity_data["entity_type"] + project_name = project["name"] + # Prepare data for new asset or for update comparison + data = { + "entityType": entity_data["entity_type"] + } - data = {} - data["entityType"] = entity_type + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + data[key] = value - # Custom attributes. - for k, val in entity_data.get("custom_attributes", {}).items(): - data[k] = val + data["inputs"] = entity_data.get("inputs") or [] - if entity_type.lower() != "project": - data["inputs"] = entity_data.get("inputs", []) + # Parents and visual parent are empty if parent is project + parents = [] + parent_id = None + if project["_id"] != parent["_id"]: + parent_id = parent["_id"] + # Use parent's parents as source value + parents.extend(parent["data"]["parents"]) + # Add parent's name to parents + parents.append(parent["name"]) - # Tasks. - tasks = entity_data.get("tasks", {}) - if tasks is not None or len(tasks) > 0: - data["tasks"] = tasks - parents = [] - visualParent = None - # do not store project"s id as visualParent - if self.project is not None: - if self.project["_id"] != parent["_id"]: - visualParent = parent["_id"] - parents.extend( - parent.get("data", {}).get("parents", []) - ) - parents.append(parent["name"]) - data["visualParent"] = visualParent - data["parents"] = parents + data["visualParent"] = parent_id + data["parents"] = parents - update_data = True - # Process project - if entity_type.lower() == "project": - entity = get_project(project_name) - # TODO: should be in validator? 
- assert (entity is not None), "Did not find project in DB" - - # get data from already existing project - cur_entity_data = entity.get("data") or {} - cur_entity_data.update(data) - data = cur_entity_data - - self.project = entity - # Raise error if project or parent are not set - elif self.project is None or parent is None: - raise AssertionError( - "Collected items are not in right order!" + asset_doc = asset_docs_by_name.get(asset_name) + # --- Create/Unarchive asset and end --- + if not asset_doc: + # Just use tasks from entity data as they are + # - this is different from the case when tasks are updated + data["tasks"] = entity_data.get("tasks") or {} + archived_asset_doc = None + for archived_entity in archived_asset_docs_by_name[asset_name]: + archived_parents = ( + archived_entity + .get("data", {}) + .get("parents") ) - # Else process assset - else: - entity = get_asset_by_name(project_name, name) - if entity: - # Do not override data, only update - cur_entity_data = entity.get("data") or {} - entity_tasks = cur_entity_data["tasks"] or {} + if data["parents"] == archived_parents: + archived_asset_doc = archived_entity + break - # create tasks as dict by default - if not entity_tasks: - cur_entity_data["tasks"] = entity_tasks - - new_tasks = data.pop("tasks", {}) - if "tasks" not in cur_entity_data and not new_tasks: - continue - for task_name in new_tasks: - if task_name in entity_tasks.keys(): - continue - cur_entity_data["tasks"][task_name] = new_tasks[ - task_name] - cur_entity_data.update(data) - data = cur_entity_data - else: - # Skip updating data - update_data = False - - archived_entities = get_archived_assets( - project_name, - asset_names=[name] - ) - unarchive_entity = None - for archived_entity in archived_entities: - archived_parents = ( - archived_entity - .get("data", {}) - .get("parents") - ) - if data["parents"] == archived_parents: - unarchive_entity = archived_entity - break - - if unarchive_entity is None: - # Create entity if doesn"t exist - entity = self.create_avalon_asset( - name, data - ) - else: - # Unarchive if entity was archived - entity = self.unarchive_entity(unarchive_entity, data) - - # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances( - context, - project_name, - entity + # Create entity if doesn't exist + if archived_asset_doc is None: + return self.create_avalon_asset( + asset_name, data, project ) - if update_data: - # Update entity data with input data - legacy_io.update_many( - {"_id": entity["_id"]}, - {"$set": {"data": data}} - ) + return self.unarchive_entity( + archived_asset_doc, data, project + ) - if "childs" in entity_data: - self.import_to_avalon( - context, project_name, entity_data["childs"], entity - ) + # --- Update existing asset --- + # Make sure current entity has "data" key + if "data" not in asset_doc: + asset_doc["data"] = {} + cur_entity_data = asset_doc["data"] + cur_entity_tasks = cur_entity_data.get("tasks") or {} - def unarchive_entity(self, entity, data): + # Tasks + data["tasks"] = {} + new_tasks = entity_data.get("tasks") or {} + for task_name, task_info in new_tasks.items(): + task_info = deepcopy(task_info) + if task_name in cur_entity_tasks: + src_task_info = deepcopy(cur_entity_tasks[task_name]) + src_task_info.update(task_info) + task_info = src_task_info + + data["tasks"][task_name] = task_info + + changes = {} + for key, value in data.items(): + if key not in cur_entity_data or value != cur_entity_data[key]: + update_key = "data.{}".format(key) + 
changes[update_key] = value + cur_entity_data[key] = value + + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + asset_doc + ) + + # Update asset in database if necessary + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": asset_doc["_id"]}, + {"$set": changes} + ) + return asset_doc + + def unarchive_entity(self, archived_doc, data, project): # Unarchived asset should not use same data - new_entity = { - "_id": entity["_id"], + asset_doc = { + "_id": archived_doc["_id"], "schema": "openpype:asset-3.0", - "name": entity["name"], - "parent": self.project["_id"], + "name": archived_doc["name"], + "parent": project["_id"], "type": "asset", "data": data } legacy_io.replace_one( - {"_id": entity["_id"]}, - new_entity + {"_id": archived_doc["_id"]}, + asset_doc ) - return new_entity + return asset_doc - def create_avalon_asset(self, name, data): + def create_avalon_asset(self, name, data, project): asset_doc = { "schema": "openpype:asset-3.0", "name": name, - "parent": self.project["_id"], + "parent": project["_id"], "type": "asset", "data": data } @@ -194,27 +271,27 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): project_name, asset_doc ): + asset_name = asset_doc["name"] + new_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(new_parents) + parent_name = project_name + if new_parents: + parent_name = new_parents[-1] + for instance in context: - # Skip instance if has filled asset entity - if instance.data.get("assetEntity"): + # Skip if instance asset does not match + instance_asset_name = instance.data.get("asset") + if asset_name != instance_asset_name: continue - asset_name = asset_doc["name"] - inst_asset_name = instance.data["asset"] - if asset_name == inst_asset_name: - instance.data["assetEntity"] = asset_doc + instance_asset_doc = instance.data.get("assetEntity") + # Update asset entity with new possible changes of asset document + instance.data["assetEntity"] = asset_doc - # get parenting data - parents = asset_doc["data"].get("parents") or list() - - # equire only relative parent - parent_name = project_name - if parents: - parent_name = parents[-1] - - # update avalon data on instance + # Update anatomy data if asset was not set on instance + if not instance_asset_doc: instance.data["anatomyData"].update({ - "hierarchy": "/".join(parents), + "hierarchy": hierarchy, "task": {}, "parent": parent_name }) @@ -241,7 +318,7 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): hierarchy_context = context.data["hierarchyContext"] active_assets = [] - # filter only the active publishing insatnces + # filter only the active publishing instances for instance in context: if instance.data.get("publish") is False: continue From d92448f923a8da0e094c5b78e645c40b45d6f363 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:02:08 +0100 Subject: [PATCH 115/202] celaction: adding deadline custom plugin --- .../CelAction/CelAction.ico | Bin 0 -> 103192 bytes .../CelAction/CelAction.param | 38 ++++++ .../CelAction/CelAction.py | 121 ++++++++++++++++++ 3 files changed, 159 insertions(+) create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param create mode 100644 openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py diff --git 
a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico new file mode 100644 index 0000000000000000000000000000000000000000..39d61592fe1addb07ed3ef93de362370485a23b9 GIT binary patch literal 103192 zcmeHQ2V4_L7oQ+17Eb(ZC>9jYh9aJ*CI!Yvtch-C}%GyqF4^>y<&9SCuIvvd)< zyn}(Z{5E{Be(h$pp=tdg_5&PnE+#$Pwd3M5(5L>QAp?FM?UI3!p_ z9*8`qYq`0$MK^tmffk0xHhFINd+5)x4>?bta1u{GnrxB#e4a(@6_1Wb=1x7gZK{30 z4oB?j%rLKBZ~mJ$-ri`WK@-vR>td5!SF1CNKeFsoyjQHrad{on_@E2l@k07({*I8r zZGeF?9Co)mCRH~+_Q~SvPrD~HFB?@a#(9pO(Ph+?NzK;wM@JHp2Mv!q>)T;Z z#><(HHePt##ChLlCj;b?y!?EJ9ZTk>@(n{fI#2RBzwOtg57Vr5QCRv(;fPt++K;%g zvY&B^9%{~x7q!>>>xBt2pTItBtUGNxvcIv&m(p!*FmXq=j?@FEe@vh{l4@JZ@*+$18?8#QTLrLpi{|1*jAsSr?~a%;lS=?HIvnAP4M_@ zu9=2A8Zj-5;zWBs1UOD?mc7F6gHf{k@oq+eBU+@e%$Na*bEpZ;g`<6Zmn4Z&jE1K8 zHg!%ozio%;Q`DvWK{gE=n(8tw`nc_-a*jQn*r2ajbStY@)%N#XVbuHYAUhz8}v zi9X+{?c`gx+57tKHm;9o%rNgV%gxgR?XO{BJ1C;BM{v@d_p{wRx1!D0vd4~e$cTC6 z>)!K``|ro8VeAn#ZS)$Bt5##>`!pZXn?F1D{>g1cr`*Gr>@%j06LSM%wrv-WPR!#6 zj%anu7`>fk&rUEqxY%{C`~IxqsQ2HsXJu7!ePb zpKmyboztS(L)YQ#_VrCYf(==nso?0t;Vaty8q{&hy?}`Q|BJQ_)$_2_Te$Q7`Heko zkB6RW$uL-EVcTy28qhS_Yv<>lP7RaicYY8us{YAK!OksD3w!+#;L~#XEVs>i41>O{ zALkfZ#!vC%x+`+We>=F3*+M*A;_75%#j?O)Fr zwZ?5#yNL7V$8GmKdvqf^e(=)IW_B+&da@EjT!x0Zoci)<&zpmKy1f{8B1AR=n(sqX zU(UX`sYiI%{GcWlwmP?J8`s79Xwv-b?XkLbijh@GmZg*>T>xXo= zqz!(wu%2;;);*~CyVC}{xEL*TywA=*ckJmOLD5IIvyKVtqp|3nOYiWQMb6^5q_eCv ze(1kxU>4g}qvO-I);on5=CyrWjavH)3hy^h>^Eh_Y`0+zO_A-Vo{M)49dmbNdU8*% zcu_*Dr6(3MMK;44Gg6;@W<9pu;?Q>mH!f3fHte^JE1F$vojo5|G1Nm{FX-pY*je-T z-M$`EFZ_^mLRjByELvwCUT@TnMs~JI7u!U8%^r+`dpCXGEh8-*?=* z+n`g8l}w$n$p32ZklksUs$1*3OtWME`RAUG?*|#B2(#;WzjnLD8a|)_V`0Cui^G4- zYT}Ul_`(CLYvRVkR&N?~DJ6&9Ib1KwZTw)gdGov;x$EW&8a)<Q+u<%9EzvHw^!@3A-{!ht1JCb$ed+iFS_b>s(HV4?Bdz1qwA~_lRHOyb$Gkm zV}1=!!DRbZ;ifzL9zXH2{#m!ve^9@N+s5BDt~bDl5gX%Sz*?U+xbDJQu`!uzMtX*I zi<>*!Ekf7SW2(re`2e=bAV4u>BZ|p2nW=~TH*P@DUWRX5dW@U<_tsfU#(2)=&g-7m z4j6BPMC1MZhu-w-`-6^a|9~*F7^@~TmW4GGTnTKpa&G!R$(<%x1xB-n8=El(EweTc zXglrdtsKYD_CrH|Ocags>AnBsmQZV+$;g?nBd=^1j*SrAVMlq6cDb<1LYLWL-gf=| z?t^k7=d@dJ(=+(Z|Cs((+dD_M`Z@BS&Q=$fCH)bh&qz%ec5cQ^UC!~Lfu@^{beO$% zUz^0p+s`rNcVyaMMjx6uIIPO|dEdlg*N}SQp>ZD#AHH7QY4xU*G=0a(_CvDc{**&O3LfEm-DxIww5PXK&?8ZfZp?t?%lpF z#o$cy<`19c$5{8eb@4;|U&+ardfW_a#_DC;yOAI6)u*LRy%Wz*m>?a8HLPLtKZd&q z`%md@xhtz)2(>e1f=!pZCrADD)WP+YcfBUdu8^qcF?4n*ruyzI7^on->2 zK+5qy2X%UFfTp%a{YI}69567vlX7(L?JuXn){{;SyBLtIb81q?nD9p|p2=TYm0`>B(nMtog^zK%MAQ#|K!NOuEbMeX;^{$K2-Cw_sy;FHS# z`mlkXFE3B3;l|v*Hzf8)$(d)q^_lRPp=*E7*)Pqix0%Md6!LG{`+eK;1Ap0KH2)W` zx#zuuCiOekV(26o*PQp^*(iND8MV@_?cih6kOP;JaTo8{{I=w@_nv?m&WHD&p75rr zS2eG!;U=ct&n_F8>6`5^IWBK{{j=PQV@?{-um7LpIN#oe=!bAQGg0>zT{c=<_BRnbm zDC}}G*=+Oi`AIf+53!xIbWGPiF+j%?XOC;cok&HnA8$CcX;_nNvBdhat?)}UuQJrusf ze_c$H*_~&*=VxE1!jc|rpLBkAC!SlkW23yApPq4ZeQ;|ea@Z9#X|1VqORi(J(+6+A z$nS9}ePWR7jY~dD?k=yjs6P_STpVb+@Z_Vgs3uMBrT8b+zHJ}R%V=Y$%Un9~@+5R@ zJ7WF%%z0dDWbl{36%!i?lKrFfZ3kW+R!4`~b$VvtFAG1M*^@G1ot|lskz1#^`Ad_3 z`J;326kU%gBbHjcuF;^k+orc;=Ca2;NqaIr|LKia>>drC7#$sSPHH7E)9yAid3n=I zJ`EkiCNx_KLpRTQbilI*gL9pKbLP32F>Q668qWQDaa+oAvqQrB;U@X3F0!9_&v};S z9kA?Zde)9>^YblM8^-TCK7Z=RFKrDN=I5RXr?u_xbgv`-EcaBpYjgKNn*{bhYcCG6 z*%iZBdSj)y!-H8ahO->+AHF`tYxMM4OP+DPQ>I>@!?yQ1`(Vz@&_9#1@2x%X>ob#U zBHOdqFGgAGTJ+H|?y#cS@4fd#ZrRGLxxv%ok1nHlj_18yGcs?i2^gP~@B5DRAjzgj zw>Y0mm-H_*qOG>Hq0kqbKv z1wfm@@OwSHbNm9s+=Uh^FudoiD|`!WeH+Qk$TwS-32O_9c}qMSd1;lN{e!09`X}c<2DKCi=s{!nXR@f`OIp&_*UREZ5_!TjGEmjT;r2>F_t|DPfEfEd-X9iR(y(8W&B*^zRR z@TY6M`-=0#P}KbAwav?Ny% zf#TNx1&Q-0(Wekfl*9byDDTZ?igT;3(y>QTJ`z`rfHM6HUe2Pv^&{`YexiTXs#Pmj 
znf~+Bk9_Our^NhJqJOnCthx}Z)^sgG9 zs-8>qU)43NT0WwG)%aBPT%!M~u3^>k5&f&ir>f@?{a1AjtCo-GUo}2eJ(uXes%u!a zd_@1M@u}*$ME_M?!>Z*Y`d5ulRnH~*uj(3BEuXgP{|oOG@&8b*TD5YO>7O6>+c)q3 zBYf>a^sknNRTomGf9yxgkF}?;j~(^}`__p6+)=6SiT|%^T`S8iPXF5K6Ru1~l2$wd zME}KO^F8Dc{eKTyE?!2W|KhRv9&%*$&%I|%3E|xu?xUVX=2x6swIz8a%>{*Twm@A* zk@~!ternAld9j0v)Rpx8%B4vCymvb(UX+Dg`R_qznv{{&KYpJ|EKHZ&_u-HmX)cE= zo)2;#bb$9RUYB@pn4fW)Vu$M%sV_)cQes_7#HM)BRz>P^A`Gc7+=Qa_#rYKXVdui~ zn#AUNXp7cAFUC$D+tTVmzBZMgd0x2aNepsAfF#Us!*@={U{!%M65eQErZdy|cq_IG*uE^S{`xxD1O4?_vE<2~3w;kR3=vh3fx zY%oq5gWlzIPul^E2^Yvd%2n4w;2iG&x=yF{&AHp0;=;W@8}9$;G-3l)Q~!#(=Or$r zgs{#cgnM;?;2p(*_1!`nfH`5#Kd5fiMB@9C3Xkca+rsa0aigszddIpiq`WMO1L-n0 z5J7C9YS;?QoAEWjP`5l-S1HuHtPQ|=8nJ<@p>stkVEixq9tQ`W?+JzfQUZ9MSA;n6 z6z5T|LR$g5Nr3x>c8cwhxOO2>wKbtpRy$X*lF_??93 zkW~|z??);;rvp7ksG)CVV?sKy0qwUHsSP~&&juJ2=Wa8K1J6>G(tQ(ITS$jBg3j8H z!uKtK#0EmpW!eVt{l5@=zm(n=z&!%KXQ0bMh`-}3`HtK7jiPo-kJkn8dLOOVl5Btv zK9_V^VguT(dzyQR4TPdIv|jO=umF55iyH?NZ2;z!ztL@xPmAbdnG5F(+Cu?f!s&Mj{}((6m4KU-3|%07Xh#V7y4Sj20uFiE24NC_!vOp zGIV*UyHI;2C!Es=U_H`0#rowXEuy#yPD+WR@V*e#-AuPbLhVHW^f(V}AivNKWS$?D zltyeod-X2MIiA;*!v=^ynJk!E`dexPqLR*eW%)1;_z-_GEmgb>+h7CZK)-BS$6R=y zm!E#P7&!v2l{P^9$+T35uZ}* zXUa>W@O^`V4G_Q5id9jCw~lQ9t_P0_^%|}ttK*V>XX0x~g)>Pys89rmPho|^s}+rx z*9Kr5vCs-Z36sSlkQ0RTU_6)cD}{Y!RQZ+4?=wey3X4Tndoq;y6yoDnVH;j}meq&|fQ=sBS(e=;8U70~5y2-lH@#kEV)wF?2b7O3_q%+H~)ZP5CM z^1<3AA>prTAL3x@Si`K5xzMI3IlP<6inSJSMlF|eLf&f8)AV3fx z2oMAa0t5kq06~BtKoB4Z5CjMU1Ob8oL4Y7Y5FiK;1PB5I0fGQQfFM8+AP5iy2m%BF zf&f9_>mvZKx{#x^2q0t#n?$(N)x-!H=pkfF$3+O4>&nNCOBt7|uh>3(OdR)5cUSHZ z+Q)}1|0GWED6>5X+;gbc@JAo4!j=kBQtQz<1tGJheB7g&d>k_R3y&;3&}GeK=KTT6c~W>MXo(ckpZRx1E~rOuoM^&C@>&WV4z5cGJ}tV zldmpu=^2n$fg%DT1%$*3aY_ME%EMh=1x)!U^-dv9qRJ7#8(i1{c>pnxG5m+a4xt8h z3P=(s{|EvE0fGQQfFM8+AP5iy%8dZLU)U=*3^}Q!P}CN_kF5)3e*qc+@1^O%UPMMJ z>)tJR)By0#RD=J=tPoTO%6*n8n+NY` z;=MfsWzv-EKMW;S5c}nB7WCQ%`1c2jQAKdRcY(5fp$x_=0)CHxa{|jBrK%p}1p!W` zX5I^1lzI#`# za6HrqvlY+?$ex4c=fR%4nm+i?Cu@!TeAstd(}y5wA(3BtZdHKkYNScp{)zsH{gd-w zvj~v!kBtBH+)5*Zbp=AYt0%zw8norPHV3Jk~!cbOA22HH9@?`wuQ3ewktq& z8flW&KjyI_Nh8cwKqC_QnR*mAvTLFI-1}{d*Z}u_o5Fma5w$-S@SJYd$K@-xrNib#p`vMAo8UQ!I2 zQC~Q5a``yUxznfs11KlXXHog-$G&y>-?GDXDPD9RiU)g)OU5nwz8$Aa(r|hKaie~c z$b&KB+-XplUYIX)o^bCOQ*1c5pe#=##;V~)U|q60tj%o1>oc&Hy-)d(X^9BsViROti}lXWyjbwP0dXG1eKrwezyfS~m8)?rs0=b%fvj9^tUbkpH7ZQp zL}!Y9`v;5x7xJt8o|_<}4c0dhmRXT_kTzTkR9P+-0R#N}GK&p!H{P2q6z`pvgt#we zVLXJ8KdxLIXhCHd^SXen8QdqoP(qlq(cd58z1fU05CH~GS|CQbSOgi{f~@cGclU&% z+(Hca@Am=*3^C--DOU$tP#I)w2^b`CVBX6A7y#cN@Rtzx=@@!!aMJ=Y%EcmJU<4S% z0nsumh8xS(ffiJTXZnpme#y6~IB=hfzeh?F8}19iU&i}e_UF+A#PI#@dVs+_4y*&< z?{U&&qTo{~#lBSsW8eqh->IgFzP{Bq;9Mx2BMZ0*|6u_4`E0n)hy0UZD>c4V{x!iD z$X65W;0nH{$KPQQ!gqKu@pHlf1Ic>N6)00%6McQFZD1XPu1~{tP;o4z<6+oVzE%D; z!52Q)3owA|0;~(MZyWqax9VaX?%**)6McQFZQwB<#=YJ68i3~q^q9z(uI~W`?%yi! 
zn&b=Q+>C92|L8>TI(uyo;MY;j8G)_tL zDX)SKH1Z$s6JZYI1XQ?BL|d{?1i(QKf&f8)AW*3Yz`BSD&}E?eO6VM{nY4oR^SBJG z9km5NsY6Q2;kwtM>{nk~f__f`(d%1^;SR{dM5iBv->x!oI-L%$eQjj`FKblr18ezc z7LZt42AAC<{Y~>M8}9hp{DW^=HQ;9`*oC7K8VB!U<7W~BAa4lkhQx=@K}k7BSg#%q z&o8QfZT_{rKfr!IP?Cn|tNRTA-`9XBLE?N$^!a|7NSNYEp9A4vTc0%lqE9!=JYW98 zi6{J*QE>!+3Vhp%{glys@bfXr_@nLTZ+_P0GLJtZ*4;`UfABjwyl5*R>rxZvPQyy) zpB=7Kz(Y9r9N_mTXrg^Fm^J@P!@nhSQt;D|FIOqoue`TjRNjYul06{s`%+@TdrB-~ z1LabJq(u^5@%w>d@TZZa=YOK`{u5XReC@?)(sySng@61`5auoKy$|rDBYE#fEO=YM zFYqczoF|oGEW8)=u^=7Zk>bS;hIEERhP)4d<1&@OKi<>iDNwwW@b8y0aXS4uq$Max z$7SjIu4Nqu%Tf>e#Xi~TKu5u+rqI6(_S!14hZxN55m1qIWo664OKIx>&vxJ&$L6OW zrMLy}zFq?UYL&K~$|?{1`(U3>0p}?tV8>G2nEu#)gW+CFD}0-PPrOIlKR}XmQzpEn zEizzxf%{f}&V3W`F**hB=$wc5QOjGnwF-m$|?(dRtFh;xv@hi0qY*cXWgN=&nIEKdI5Q@D~qqv^b5A61N_Ra zRng}~Dy08dnzkydEcjkxzr#)JJB`HH*N_kQ#QYEY@f;@&&Ihg7SMGB#$N)ZboXD{|UU}063;I%+FUd!&aM%hs!6Ej>f>40HtsoR( z?<)w!*o_N93j2ZK5Pzu%hfLdo5I2D10%$-ILIXGyOG8SUP8}{tr{KW(>V?=(%NWQG zsL)T#fcR+v9ONhi0x(x;2lpE0GSnK<8Ui&2Y9*6y4)G4~&YH#!nB$EHlD%iZzn22x z{gd&&@C$(y^QUWy0cg87&}$&PHcHbn_vwhUM+m>q3QmY;^&6KS1X_o9ZHzI8lDdxrUWnkfOWJQlo0ms!EwCrJm=OA65k(? zUn9@1z#k2-qk6#@yB5e2$VwXG-^Ir@^xOpc-o=M!soa;d^I+{~tfs~kb=!gdC&Jog zKHeJ!_VoV(agANq)aD@+jp0Q1$rFHI0WW2Z7;tb_x2~qjf!ss5&nNQ)+!qx0#h=)| zS81wkb=v{Ce};ZJ92f^UFb=T8t8dWcJVzZqfCo!>?>z_a7sdbg1n^LE=ht3C0u0PK zk8Lt|$u1Np$^z@+5X||$>YgmnpDEz~p7&}QJfoBNG<@~%Jb~P8I8i?(bK^(iv0;=t z`qOk7z@aJd(L>YilJ>q21d7b5n6Pf)Atjti|5g;GAm=gnjq-0sSE*{2Qm0_$&y@`~b8V2;avJ zlM?=o(=a3|4=7{7e>$F>mv&f=_|F{KkY}l~GGsy->%8~7<&^=kyfi;Np4+Ka|J|;l z^*{KkU7!1~6U-S$O8f))Psd8`oACbKYWDv#|LK@ZaIbej@N%vn?4f*spD^7I$DzJS zN&ItT2HfC8cEi3{CF)99TQJ@?;=Y{wnD_dxL};U>vN9#5K^q2~du{IVUVFTO>v)-b z0-*1@oM`JXZrpHu&t0bdP@tY3=TYx1Sf-MAuB`lmwNHdt;0;wAeTMZ>7LdRlA(1UY JVj#NS{{bY%R3rcZ literal 0 HcmV?d00001 diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param new file mode 100644 index 0000000000..24c59d2005 --- /dev/null +++ b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param @@ -0,0 +1,38 @@ +[About] +Type=label +Label=About +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=Celaction Plugin for Deadline +Description=Not configurable + +[ConcurrentTasks] +Type=label +Label=ConcurrentTasks +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=True +Description=Not configurable + +[Executable] +Type=filename +Label=Executable +Category=Config +CategoryOrder=0 +CategoryIndex=0 +Description=The command executable to run +Required=false +DisableIfBlank=true + +[RenderNameSeparator] +Type=string +Label=RenderNameSeparator +Category=Config +CategoryOrder=0 +CategoryIndex=1 +Description=The separator to use for naming +Required=false +DisableIfBlank=true +Default=. diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py new file mode 100644 index 0000000000..d19adc4fb9 --- /dev/null +++ b/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py @@ -0,0 +1,121 @@ +from System.Text.RegularExpressions import * + +from Deadline.Plugins import * +from Deadline.Scripting import * + +import _winreg + +###################################################################### +# This is the function that Deadline calls to get an instance of the +# main DeadlinePlugin class. 
+###################################################################### + + +def GetDeadlinePlugin(): + return CelActionPlugin() + + +def CleanupDeadlinePlugin(deadlinePlugin): + deadlinePlugin.Cleanup() + +###################################################################### +# This is the main DeadlinePlugin class for the CelAction plugin. +###################################################################### + + +class CelActionPlugin(DeadlinePlugin): + + def __init__(self): + self.InitializeProcessCallback += self.InitializeProcess + self.RenderExecutableCallback += self.RenderExecutable + self.RenderArgumentCallback += self.RenderArgument + self.StartupDirectoryCallback += self.StartupDirectory + + def Cleanup(self): + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + del self.StartupDirectoryCallback + + def GetCelActionRegistryKey(self): + # Modify registry for frame separation + path = r'Software\CelAction\CelAction2D\User Settings' + _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path) + regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, + _winreg.KEY_ALL_ACCESS) + return regKey + + def GetSeparatorValue(self, regKey): + useSeparator, _ = _winreg.QueryValueEx( + regKey, 'RenderNameUseSeparator') + separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator') + + return useSeparator, separator + + def SetSeparatorValue(self, regKey, useSeparator, separator): + _winreg.SetValueEx(regKey, 'RenderNameUseSeparator', + 0, _winreg.REG_DWORD, useSeparator) + _winreg.SetValueEx(regKey, 'RenderNameSeparator', + 0, _winreg.REG_SZ, separator) + + def InitializeProcess(self): + # Set the plugin specific settings. + self.SingleFramesOnly = False + + # Set the process specific settings. 
+ self.StdoutHandling = True + self.PopupHandling = True + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Rendering.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Wait.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Timeline Scrub.*") + + celActionRegKey = self.GetCelActionRegistryKey() + + self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault( + "RenderNameSeparator", ".").strip()) + + def RenderExecutable(self): + return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip()) + + def RenderArgument(self): + arguments = RepositoryUtils.CheckPathMapping( + self.GetPluginInfoEntry("Arguments").strip()) + arguments = arguments.replace( + "", str(self.GetStartFrame())) + arguments = arguments.replace("", str(self.GetEndFrame())) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetStartFrame()) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetEndFrame()) + arguments = arguments.replace("", "\"") + return arguments + + def StartupDirectory(self): + return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip() + + def ReplacePaddedFrame(self, arguments, pattern, frame): + frameRegex = Regex(pattern) + while True: + frameMatch = frameRegex.Match(arguments) + if frameMatch.Success: + paddingSize = int(frameMatch.Groups[1].Value) + if paddingSize > 0: + padding = StringUtils.ToZeroPaddedString( + frame, paddingSize, False) + else: + padding = str(frame) + arguments = arguments.replace( + frameMatch.Groups[0].Value, padding) + else: + break + + return arguments From 4a9b214d8daf7b93634248518572659dc3094001 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 25 Nov 2022 12:12:58 +0100 Subject: [PATCH 116/202] fix sync of asset docs on instances --- .../plugins/publish/extract_hierarchy_avalon.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index a9c0593f9f..b2a6adc210 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -59,7 +59,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): else: new_parent = self.sync_asset( - context, name, entity_data, parent, @@ -67,6 +66,12 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): asset_docs_by_name, archived_asset_docs_by_name ) + # make sure all relative instances have correct avalon data + self._set_avalon_data_to_relative_instances( + context, + project_name, + new_parent + ) children = entity_data.get("childs") if not children: @@ -132,7 +137,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): def sync_asset( self, - context, asset_name, entity_data, parent, @@ -140,7 +144,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): asset_docs_by_name, archived_asset_docs_by_name ): - project_name = project["name"] # Prepare data for new asset or for update comparison data = { "entityType": entity_data["entity_type"] @@ -219,13 +222,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): changes[update_key] = value cur_entity_data[key] = value - # make sure all relative instances have correct avalon data - self._set_avalon_data_to_relative_instances( - context, - project_name, - asset_doc - ) - # Update asset in database if necessary if changes: # Update entity data with input data From 04ac17455790ffdb6275ae31d0ec4bb4f595732e Mon Sep 17 00:00:00 2001 From: 
Jakub Jezek Date: Fri, 25 Nov 2022 12:30:31 +0100 Subject: [PATCH 117/202] adding .eslintignore file for deadline plugin --- openpype/hosts/celaction/resources/.eslintignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 openpype/hosts/celaction/resources/.eslintignore diff --git a/openpype/hosts/celaction/resources/.eslintignore b/openpype/hosts/celaction/resources/.eslintignore new file mode 100644 index 0000000000..17a45423da --- /dev/null +++ b/openpype/hosts/celaction/resources/.eslintignore @@ -0,0 +1 @@ +deadline_custom_plugin/CelAction/* \ No newline at end of file From 86608dd106f2421062a8746b85caa89a729cb714 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:40:53 +0100 Subject: [PATCH 118/202] trying better flake8 ignore --- openpype/hosts/celaction/resources/.eslintignore | 1 - openpype/hosts/celaction/resources/.flake8 | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) delete mode 100644 openpype/hosts/celaction/resources/.eslintignore create mode 100644 openpype/hosts/celaction/resources/.flake8 diff --git a/openpype/hosts/celaction/resources/.eslintignore b/openpype/hosts/celaction/resources/.eslintignore deleted file mode 100644 index 17a45423da..0000000000 --- a/openpype/hosts/celaction/resources/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -deadline_custom_plugin/CelAction/* \ No newline at end of file diff --git a/openpype/hosts/celaction/resources/.flake8 b/openpype/hosts/celaction/resources/.flake8 new file mode 100644 index 0000000000..6cc095c684 --- /dev/null +++ b/openpype/hosts/celaction/resources/.flake8 @@ -0,0 +1,3 @@ +[flake8] +exclude = + */deadline_custom_plugin/CelAction \ No newline at end of file From f8e3e99aad1af2c4571725f6be7c9617a4768218 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:42:29 +0100 Subject: [PATCH 119/202] flake8 ignore almost done --- setup.cfg | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 0a9664033d..a721b8e9f6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,8 @@ exclude = docs, */vendor, website, - openpype/vendor + openpype/vendor, + */deadline_custom_plugin/CelAction max-complexity = 30 From ceabbe0de37a09915661456dd073ab55a98f8deb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 12:44:34 +0100 Subject: [PATCH 120/202] flake8 file clearing --- openpype/hosts/celaction/resources/.flake8 | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 openpype/hosts/celaction/resources/.flake8 diff --git a/openpype/hosts/celaction/resources/.flake8 b/openpype/hosts/celaction/resources/.flake8 deleted file mode 100644 index 6cc095c684..0000000000 --- a/openpype/hosts/celaction/resources/.flake8 +++ /dev/null @@ -1,3 +0,0 @@ -[flake8] -exclude = - */deadline_custom_plugin/CelAction \ No newline at end of file From 6cefb2ec4b63744b0bed508b3d9671d69e022c57 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 15:01:14 +0100 Subject: [PATCH 121/202] celaction: deadline and settings improvements --- .../plugins/publish/collect_render_path.py | 26 +++++------ .../publish/submit_celaction_deadline.py | 5 +-- .../defaults/project_settings/celaction.json | 12 ++--- .../defaults/project_settings/deadline.json | 9 ++++ .../schema_project_celaction.json | 39 ++++------------ .../schema_project_deadline.json | 45 +++++++++++++++++++ 6 files changed, 82 insertions(+), 54 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py 
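[Editorial aside, not part of the patch below: the frame placeholder handled in this collector (dropped here and restored a few patches later in the series) is a printf-style token built from the anatomy frame padding. A tiny illustration with made-up values:]

    padding = 4                            # e.g. anatomy.templates "frame_padding"
    frame_token = "%0{}d".format(padding)  # -> "%04d"
    print(frame_token % 25)                # -> "0025"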
index ec89fc2e35..e5871f8792 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -11,29 +11,29 @@ class CollectRenderPath(pyblish.api.InstancePlugin): families = ["render.farm"] # Presets - anatomy_render_key = None - publish_render_metadata = None + output_extension = "png" + anatomy_template_key_render_files = None + anatomy_template_key_metadata = None def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy_data["family"] = "render" - padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ - "frame": f"%0{padding}d", - "representation": "png", - "ext": "png" + "family": "render", + "representation": self.output_extension, + "ext": self.output_extension }) anatomy_filled = anatomy.format(anatomy_data) # get anatomy rendering keys - anatomy_render_key = self.anatomy_render_key or "render" - publish_render_metadata = self.publish_render_metadata or "render" + r_anatomy_key = self.anatomy_template_key_render_files + m_anatomy_key = self.anatomy_template_key_metadata # get folder and path for rendering images from celaction - render_dir = anatomy_filled[anatomy_render_key]["folder"] - render_path = anatomy_filled[anatomy_render_key]["path"] + render_dir = anatomy_filled[r_anatomy_key]["folder"] + render_path = anatomy_filled[r_anatomy_key]["path"] + self.log.debug("__ render_path: `{}`".format(render_path)) # create dir if it doesnt exists try: @@ -47,9 +47,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): instance.data["path"] = render_path # get anatomy for published renders folder path - if anatomy_filled.get(publish_render_metadata): + if anatomy_filled.get(m_anatomy_key): instance.data["publishRenderMetadataFolder"] = anatomy_filled[ - publish_render_metadata]["folder"] + m_anatomy_key]["folder"] self.log.info("Metadata render path: `{}`".format( instance.data["publishRenderMetadataFolder"] )) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 8a3160e83d..e39c2c0061 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -7,11 +7,10 @@ import requests import pyblish.api -class ExtractCelactionDeadline(pyblish.api.InstancePlugin): +class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". + Renders are submitted to a Deadline Web Service. 
""" diff --git a/openpype/settings/defaults/project_settings/celaction.json b/openpype/settings/defaults/project_settings/celaction.json index a4a321fb27..dbe5625f06 100644 --- a/openpype/settings/defaults/project_settings/celaction.json +++ b/openpype/settings/defaults/project_settings/celaction.json @@ -1,13 +1,9 @@ { "publish": { - "ExtractCelactionDeadline": { - "enabled": true, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10 + "CollectRenderPath": { + "output_extension": "png", + "anatomy_template_key_render_files": "render", + "anatomy_template_key_metadata": "render" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index a6e7b4a94a..8e892bb67e 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -70,6 +70,15 @@ "department": "", "multiprocess": true }, + "CelactionSubmitDeadline": { + "enabled": true, + "deadline_department": "", + "deadline_priority": 50, + "deadline_pool": "", + "deadline_pool_secondary": "", + "deadline_group": "", + "deadline_chunk_size": 10 + }, "ProcessSubmittedJobOnFarm": { "enabled": true, "deadline_department": "", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json index 500e5b2298..15d9350c84 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json @@ -14,45 +14,24 @@ { "type": "dict", "collapsible": true, - "checkbox_key": "enabled", - "key": "ExtractCelactionDeadline", - "label": "ExtractCelactionDeadline", + "key": "CollectRenderPath", + "label": "CollectRenderPath", "is_group": true, "children": [ { - "type": "boolean", - "key": "enabled", - "label": "Enabled" + "type": "text", + "key": "output_extension", + "label": "Output render file extension" }, { "type": "text", - "key": "deadline_department", - "label": "Deadline apartment" - }, - { - "type": "number", - "key": "deadline_priority", - "label": "Deadline priority" + "key": "anatomy_template_key_render_files", + "label": "Anatomy template key: render files" }, { "type": "text", - "key": "deadline_pool", - "label": "Deadline pool" - }, - { - "type": "text", - "key": "deadline_pool_secondary", - "label": "Deadline pool (secondary)" - }, - { - "type": "text", - "key": "deadline_group", - "label": "Deadline Group" - }, - { - "type": "number", - "key": "deadline_chunk_size", - "label": "Deadline Chunk size" + "key": "anatomy_template_key_metadata", + "label": "Anatomy template key: metadata job file" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index cd1741ba8b..77d520c54a 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -387,6 +387,51 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CelactionSubmitDeadline", + "label": "CelactionSubmitDeadline", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": 
"Enabled" + }, + { + "type": "text", + "key": "deadline_department", + "label": "Deadline apartment" + }, + { + "type": "number", + "key": "deadline_priority", + "label": "Deadline priority" + }, + { + "type": "text", + "key": "deadline_pool", + "label": "Deadline pool" + }, + { + "type": "text", + "key": "deadline_pool_secondary", + "label": "Deadline pool (secondary)" + }, + { + "type": "text", + "key": "deadline_group", + "label": "Deadline Group" + }, + { + "type": "number", + "key": "deadline_chunk_size", + "label": "Deadline Chunk size" + } + ] + }, { "type": "dict", "collapsible": true, From ecbdac09e21f0f186d78cbb04c8030bbf1588e82 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:14:45 +0100 Subject: [PATCH 122/202] celaction: add local render target --- openpype/hosts/celaction/api/cli.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index e00a50cbec..4c07374b08 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -53,6 +53,7 @@ def main(): pyblish.api.register_plugin_path(PUBLISH_PATH) pyblish.api.register_host(PUBLISH_HOST) + pyblish.api.register_target("local") return host_tools.show_publish() From 6994e48a716e80a8b7cdb344d609826b0874b447 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:15:09 +0100 Subject: [PATCH 123/202] celaction: returning frame to anatomy data --- openpype/hosts/celaction/plugins/publish/collect_render_path.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index e5871f8792..f6db6c000d 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -18,7 +18,9 @@ class CollectRenderPath(pyblish.api.InstancePlugin): def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) + padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ + "frame": f"%0{padding}d", "family": "render", "representation": self.output_extension, "ext": self.output_extension From f8b00d7ad4a66b93886b48c8100f42fdf9b6fb25 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:16:39 +0100 Subject: [PATCH 124/202] celaction: submit job from published workfile --- .../publish/submit_celaction_deadline.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index e39c2c0061..03d59b30fd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -80,6 +80,26 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): render_dir = os.path.normpath(os.path.dirname(render_path)) render_path = os.path.normpath(render_path) script_name = os.path.basename(script_path) + + for item in instance.context: + if "workfile" in item.data["families"]: + msg = "Workfile (scene) must be published along" + assert item.data["publish"] is True, msg + + template_data = item.data.get("anatomyData") + rep = item.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + 
anatomy_filled = instance.context.data["anatomy"].format( + template_data) + template_filled = anatomy_filled["publish"]["path"] + script_path = os.path.normpath(template_filled) + + self.log.info( + "Using published scene for render {}".format(script_path) + ) + jobname = "%s - %s" % (script_name, instance.name) output_filename_0 = self.preview_fname(render_path) @@ -96,7 +116,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): chunk_size = self.deadline_chunk_size # search for %02d pattern in name, and padding number - search_results = re.search(r"(.%0)(\d)(d)[._]", render_path).groups() + search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups() split_patern = "".join(search_results) padding_number = int(search_results[1]) From 2117df1ad2a314e600178dec6c10d977cdfdc6e6 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:38:14 +0100 Subject: [PATCH 125/202] move deadline plugin to correct place --- .../custom/plugins}/CelAction/CelAction.ico | Bin .../custom/plugins}/CelAction/CelAction.param | 0 .../custom/plugins}/CelAction/CelAction.py | 1 + setup.cfg | 2 +- 4 files changed, 2 insertions(+), 1 deletion(-) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.ico (100%) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.param (100%) rename openpype/{hosts/celaction/resources/deadline_custom_plugin => modules/deadline/repository/custom/plugins}/CelAction/CelAction.py (98%) diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico similarity index 100% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.ico rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param similarity index 100% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.param rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param diff --git a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py similarity index 98% rename from openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py rename to openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py index d19adc4fb9..2d0edd3dca 100644 --- a/openpype/hosts/celaction/resources/deadline_custom_plugin/CelAction/CelAction.py +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py @@ -71,6 +71,7 @@ class CelActionPlugin(DeadlinePlugin): # Ignore 'celaction' Pop-up dialog self.AddPopupIgnorer(".*Rendering.*") + self.AddPopupIgnorer(".*AutoRender.*") # Ignore 'celaction' Pop-up dialog self.AddPopupIgnorer(".*Wait.*") diff --git a/setup.cfg b/setup.cfg index a721b8e9f6..10cca3eb3f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,7 +9,7 @@ exclude = */vendor, website, openpype/vendor, - */deadline_custom_plugin/CelAction + *deadline/repository/custom/plugins max-complexity = 30 From 891a7e7609b9bd296af0923f3c508f9b8a203689 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:47:22 
+0100 Subject: [PATCH 126/202] celaction: restructuring cli.py and kwargs --- openpype/hosts/celaction/api/__init__.py | 1 - openpype/hosts/celaction/hooks/pre_celaction_setup.py | 11 +++++------ .../plugins/publish/collect_celaction_cli_kwargs.py | 10 +++++----- .../plugins/publish/collect_celaction_instances.py | 3 ++- openpype/hosts/celaction/scripts/__init__.py | 1 + .../celaction/{api/cli.py => scripts/publish_cli.py} | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) delete mode 100644 openpype/hosts/celaction/api/__init__.py create mode 100644 openpype/hosts/celaction/scripts/__init__.py rename openpype/hosts/celaction/{api/cli.py => scripts/publish_cli.py} (93%) diff --git a/openpype/hosts/celaction/api/__init__.py b/openpype/hosts/celaction/api/__init__.py deleted file mode 100644 index 8c93d93738..0000000000 --- a/openpype/hosts/celaction/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -kwargs = None diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index cde3a0c723..21ff38b701 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,12 +1,11 @@ import os import shutil -import subprocess import winreg from openpype.lib import PreLaunchHook, get_openpype_execute_args -from openpype.hosts.celaction import api as caapi +from openpype.hosts.celaction import scripts -CELACTION_API_DIR = os.path.dirname( - os.path.abspath(caapi.__file__) +CELACTION_SCRIPTS_DIR = os.path.dirname( + os.path.abspath(scripts.__file__) ) @@ -38,7 +37,7 @@ class CelactionPrelaunchHook(PreLaunchHook): winreg.KEY_ALL_ACCESS ) - path_to_cli = os.path.join(CELACTION_API_DIR, "cli.py") + path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) openpype_executables = subproces_args.pop(0) @@ -108,7 +107,7 @@ class CelactionPrelaunchHook(PreLaunchHook): if not os.path.exists(workfile_path): # TODO add ability to set different template workfile path via # settings - openpype_celaction_dir = os.path.dirname(CELACTION_API_DIR) + openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR) template_path = os.path.join( openpype_celaction_dir, "resources", diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index 15c5ddaf1c..b82c0f5648 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.hosts.celaction import api as celaction +from openpype.hosts.celaction import scripts class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -9,15 +9,15 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - kwargs = celaction.kwargs.copy() + passing_kwargs = scripts.PASSING_KWARGS.copy() self.log.info("Storing kwargs: %s" % kwargs) - context.set_data("kwargs", kwargs) + context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values - for k, v in kwargs.items(): + for k, v in passing_kwargs.items(): self.log.info(f"Setting `{k}` to instance.data with value: `{v}`") if k in ["frameStart", "frameEnd"]: - context.data[k] = kwargs[k] = int(v) + context.data[k] = passing_kwargs[k] = int(v) else: context.data[k] = v diff --git 
a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py index b5f99a1416..35ac7fc264 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py @@ -36,7 +36,8 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin): "version": version } - celaction_kwargs = context.data.get("kwargs", {}) + celaction_kwargs = context.data.get( + "passingKwargs", {}) if celaction_kwargs: shared_instance_data.update(celaction_kwargs) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py new file mode 100644 index 0000000000..dfd9b37ae2 --- /dev/null +++ b/openpype/hosts/celaction/scripts/__init__.py @@ -0,0 +1 @@ +PASSING_KWARGS = None diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/scripts/publish_cli.py similarity index 93% rename from openpype/hosts/celaction/api/cli.py rename to openpype/hosts/celaction/scripts/publish_cli.py index 4c07374b08..586880dc4c 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -7,7 +7,7 @@ import pyblish.util import openpype.hosts.celaction from openpype.lib import Logger -from openpype.hosts.celaction import api as celaction +from openpype.hosts.celaction import scripts from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins @@ -41,7 +41,7 @@ def cli(): parser.add_argument("--resolutionHeight", help=("Height of resolution")) - celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ + scripts.PASSING_KWARGS = parser.parse_args(sys.argv[1:]).__dict__ def main(): From 91580fdb301dd9907660a6a2b145b775f961ac97 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:51:44 +0100 Subject: [PATCH 127/202] celaction: hook should define data from asset_doc rather then from project doc --- openpype/hosts/celaction/hooks/pre_celaction_setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index 21ff38b701..a8fc7f322e 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -18,9 +18,9 @@ class CelactionPrelaunchHook(PreLaunchHook): platforms = ["windows"] def execute(self): - project_doc = self.data["project_doc"] - width = project_doc["data"]["resolutionWidth"] - height = project_doc["data"]["resolutionHeight"] + asset_doc = self.data["asset_doc"] + width = asset_doc["data"]["resolutionWidth"] + height = asset_doc["data"]["resolutionHeight"] # Add workfile path to launch arguments workfile_path = self.workfile_path() From a4e6f67692e46208bb80465987359aeebc610ca3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 16:56:19 +0100 Subject: [PATCH 128/202] celaction: fix kwargs print --- .../plugins/publish/collect_celaction_cli_kwargs.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index b82c0f5648..e552e9ba6a 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,6 @@ import pyblish.api from 
openpype.hosts.celaction import scripts +from pprint import pformat class CollectCelactionCliKwargs(pyblish.api.Collector): @@ -11,7 +12,10 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): def process(self, context): passing_kwargs = scripts.PASSING_KWARGS.copy() - self.log.info("Storing kwargs: %s" % kwargs) + self.log.info("Storing kwargs ...") + self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) + + # set kwargs to context data context.set_data("passingKwargs", passing_kwargs) # get kwargs onto context data as keys with values From 30550c26b01bbe502abaf74562408bd0f1308475 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:04:19 +0100 Subject: [PATCH 129/202] celaction: render from published workfile fix --- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 03d59b30fd..3be864781f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -82,7 +82,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): script_name = os.path.basename(script_path) for item in instance.context: - if "workfile" in item.data["families"]: + if "workfile" in item.data["family"]: msg = "Workfile (scene) must be published along" assert item.data["publish"] is True, msg From 0c475d5b9d1edaad6ce1c47f16e87f297c3443c4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:40:23 +0100 Subject: [PATCH 130/202] celaction: deadline job delay settings --- .../publish/submit_celaction_deadline.py | 78 ++++++++++++++----- .../defaults/project_settings/deadline.json | 3 +- .../schema_project_deadline.json | 5 ++ 3 files changed, 64 insertions(+), 22 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 3be864781f..f716621d59 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -5,7 +5,7 @@ import getpass import requests import pyblish.api - +from openpype.pipeline import legacy_io class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -25,12 +25,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): deadline_pool_secondary = "" deadline_group = "" deadline_chunk_size = 1 - - enviro_filter = [ - "FTRACK_API_USER", - "FTRACK_API_KEY", - "FTRACK_SERVER" - ] + deadline_job_delay = "00:00:08:00" def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" @@ -163,10 +158,11 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): # frames from Deadline Monitor "OutputFilename0": output_filename_0.replace("\\", "/"), - # # Asset dependency to wait for at least the scene file to sync. + # # Asset dependency to wait for at least + # the scene file to sync. 
# "AssetDependency0": script_path "ScheduledType": "Once", - "JobDelay": "00:00:08:00" + "JobDelay": self.deadline_job_delay }, "PluginInfo": { # Input @@ -191,18 +187,58 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - i = 0 - for key, values in dict(os.environ).items(): - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % i: "{key}={value}".format( - key=key, value=values - ) - } - ) - i += 1 + # Include critical environment variables with submission + keys = [ + "PYTHONPATH", + "PATH", + "AVALON_PROJECT", + "AVALON_ASSET", + "AVALON_TASK", + "AVALON_APP_NAME", + "FTRACK_API_KEY", + "FTRACK_API_USER", + "FTRACK_SERVER", + "PYBLISHPLUGINPATH", + "NUKE_PATH", + "TOOL_ENV", + "FOUNDRY_LICENSE", + "OPENPYPE_VERSION" + ] + # Add mongo url if it's enabled + if instance.context.data.get("deadlinePassMongoUrl"): + keys.append("OPENPYPE_MONGO") + + # add allowed keys from preset if any + if self.env_allowed_keys: + keys += self.env_allowed_keys + + environment = dict({ + key: os.environ[key] for key in keys + if key in os.environ}, **legacy_io.Session + ) + + for _path in os.environ: + if _path.lower().startswith('openpype_'): + environment[_path] = os.environ[_path] + + # to recognize job from OPENPYPE for turning Event On/Off + environment.update({ + "OPENPYPE_LOG_NO_COLORS": "1", + "OPENPYPE_RENDER_JOB": "1" + }) + + # finally search replace in values of any key + if self.env_search_replace_values: + for key, value in environment.items(): + for _k, _v in self.env_search_replace_values.items(): + environment[key] = value.replace(_k, _v) + + payload["JobInfo"].update({ + "EnvironmentKeyValue%d" % index: "{key}={value}".format( + key=key, + value=environment[key] + ) for index, key in enumerate(environment) + }) self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 8e892bb67e..6e1c0f3540 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -77,7 +77,8 @@ "deadline_pool": "", "deadline_pool_secondary": "", "deadline_group": "", - "deadline_chunk_size": 10 + "deadline_chunk_size": 10, + "deadline_job_delay": "00:00:00:00" }, "ProcessSubmittedJobOnFarm": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 77d520c54a..5295b0e9d6 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -429,6 +429,11 @@ "type": "number", "key": "deadline_chunk_size", "label": "Deadline Chunk size" + }, + { + "type": "text", + "key": "deadline_job_delay", + "label": "Delay job (timecode dd:hh:mm:ss)" } ] }, From eb1b6e037b09c20bca12ab335ec675aeae0b311e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:42:22 +0100 Subject: [PATCH 131/202] label readability --- .../schemas/projects_schema/schema_project_deadline.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json 
b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index 5295b0e9d6..69f81ed682 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -392,7 +392,7 @@ "collapsible": true, "checkbox_key": "enabled", "key": "CelactionSubmitDeadline", - "label": "CelactionSubmitDeadline", + "label": "Celaction Submit Deadline", "is_group": true, "children": [ { From 9f5e892a678307dec9b3d577a0ae1364a680af06 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 17:49:09 +0100 Subject: [PATCH 132/202] removing redundant code --- .../plugins/publish/submit_celaction_deadline.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index f716621d59..e5b06b007c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -208,10 +208,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): if instance.context.data.get("deadlinePassMongoUrl"): keys.append("OPENPYPE_MONGO") - # add allowed keys from preset if any - if self.env_allowed_keys: - keys += self.env_allowed_keys - environment = dict({ key: os.environ[key] for key in keys if key in os.environ}, **legacy_io.Session @@ -227,12 +223,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): "OPENPYPE_RENDER_JOB": "1" }) - # finally search replace in values of any key - if self.env_search_replace_values: - for key, value in environment.items(): - for _k, _v in self.env_search_replace_values.items(): - environment[key] = value.replace(_k, _v) - payload["JobInfo"].update({ "EnvironmentKeyValue%d" % index: "{key}={value}".format( key=key, From 3491ef73ac54a98cab2e759c81a211a44549ea37 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:25:41 +0100 Subject: [PATCH 133/202] remove nuke code --- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index e5b06b007c..0583e146a7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -199,9 +199,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): "FTRACK_API_USER", "FTRACK_SERVER", "PYBLISHPLUGINPATH", - "NUKE_PATH", "TOOL_ENV", - "FOUNDRY_LICENSE", "OPENPYPE_VERSION" ] # Add mongo url if it's enabled From f162ec56d76b1fe4a6b4267ea1fdfe3ca6ee6927 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:58:22 +0100 Subject: [PATCH 134/202] celaction: removing environment from job --- .../publish/submit_celaction_deadline.py | 41 ------------------- 1 file changed, 41 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 0583e146a7..9a7d74c1f7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -187,47 +187,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): plugin = payload["JobInfo"]["Plugin"] 
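# Editorial note, not part of the patch: the environment block removed just
# below followed Deadline's numbered "EnvironmentKeyValue<N>" JobInfo
# convention. A minimal standalone sketch of that pattern (the helper name
# is illustrative, not an OpenPype function):
def as_environment_key_values(environment):
    # Deadline expects one numbered entry per variable, for example
    # {"EnvironmentKeyValue0": "OPENPYPE_RENDER_JOB=1"}.
    return {
        "EnvironmentKeyValue{}".format(index): "{}={}".format(key, value)
        for index, (key, value) in enumerate(sorted(environment.items()))
    }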
self.log.info("using render plugin : {}".format(plugin)) - # Include critical environment variables with submission - keys = [ - "PYTHONPATH", - "PATH", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", - "AVALON_APP_NAME", - "FTRACK_API_KEY", - "FTRACK_API_USER", - "FTRACK_SERVER", - "PYBLISHPLUGINPATH", - "TOOL_ENV", - "OPENPYPE_VERSION" - ] - # Add mongo url if it's enabled - if instance.context.data.get("deadlinePassMongoUrl"): - keys.append("OPENPYPE_MONGO") - - environment = dict({ - key: os.environ[key] for key in keys - if key in os.environ}, **legacy_io.Session - ) - - for _path in os.environ: - if _path.lower().startswith('openpype_'): - environment[_path] = os.environ[_path] - - # to recognize job from OPENPYPE for turning Event On/Off - environment.update({ - "OPENPYPE_LOG_NO_COLORS": "1", - "OPENPYPE_RENDER_JOB": "1" - }) - - payload["JobInfo"].update({ - "EnvironmentKeyValue%d" % index: "{key}={value}".format( - key=key, - value=environment[key] - ) for index, key in enumerate(environment) - }) - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) From 193112c18bcb17c829ea4afb6813823bf10d888e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 25 Nov 2022 22:58:53 +0100 Subject: [PATCH 135/202] deadline: adding openpype version to global job --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 6362b4ca65..249211e965 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -241,6 +241,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") + environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] environment["OPENPYPE_PUBLISH_JOB"] = "1" From b74675ebb0fc360eaa9af4b8faa57d0bcd5e0541 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 26 Nov 2022 03:31:25 +0000 Subject: [PATCH 136/202] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index ffabcf8025..bf9f97d5f4 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7" +__version__ = "3.14.8-nightly.1" From 8eae3d395672f599f1fe8dea554ed1d83e1f50e3 Mon Sep 17 00:00:00 2001 From: Joseff Date: Sat, 26 Nov 2022 14:37:15 +0100 Subject: [PATCH 137/202] Update The #include for NotificationManager MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Renamed the include to "Framework/Notifications/NotificationManager.h" Co-authored-by: Ondřej Samohel <33513211+antirotor@users.noreply.github.com> --- .../UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index c2c7e249c3..322663eeec 
100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -3,7 +3,7 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" #include "AssetToolsModule.h" -#include "NotificationManager.h" +#include "Framework/Notifications/NotificationManager.h" #include "SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking From c2e9bdf161b7b073a2133efb2ce9d04e2360540d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 26 Nov 2022 21:35:40 +0100 Subject: [PATCH 138/202] celaction: DL improving code --- .../publish/submit_celaction_deadline.py | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 9a7d74c1f7..7913851d8a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -2,10 +2,9 @@ import os import re import json import getpass - import requests import pyblish.api -from openpype.pipeline import legacy_io + class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline @@ -194,10 +193,15 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): self.expected_files(instance, render_path) self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) + response = requests.post(self.deadline_url, json=payload) if not response.ok: - raise Exception(response.text) + self.log.error("Submission failed!") + self.log.error(response.status_code) + self.log.error(response.content) + self.log.debug(payload) + raise SystemExit(response.text) return response @@ -235,32 +239,29 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): split_path = path.split(split_patern) hashes = "#" * int(search_results[1]) return "".join([split_path[0], hashes, split_path[-1]]) - if "#" in path: - self.log.debug("_ path: `{}`".format(path)) - return path - else: - return path - def expected_files(self, - instance, - path): + self.log.debug("_ path: `{}`".format(path)) + return path + + def expected_files(self, instance, filepath): """ Create expected files in instance data """ if not instance.data.get("expectedFiles"): - instance.data["expectedFiles"] = list() + instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) - file = os.path.basename(path) + dirpath = os.path.dirname(filepath) + filename = os.path.basename(filepath) - if "#" in file: - pparts = file.split("#") + if "#" in filename: + pparts = filename.split("#") padding = "%0{}d".format(len(pparts) - 1) - file = pparts[0] + padding + pparts[-1] + filename = pparts[0] + padding + pparts[-1] - if "%" not in file: - instance.data["expectedFiles"].append(path) + if "%" not in filename: + instance.data["expectedFiles"].append(filepath) return for i in range(self._frame_start, (self._frame_end + 1)): instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirpath, (filename % i)).replace("\\", "/") + ) From 1f6be563eb6b28ef0988d55e6a143fe0e98d6aa1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Sat, 26 Nov 2022 21:36:03 +0100 Subject: [PATCH 139/202] celaction: improving code PR feedback --- .../celaction/hooks/pre_celaction_setup.py | 20 +++++++++---------- 
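[Editorial aside, not part of the patch: the hook change below swaps manual joining of the submit parameters for subprocess.list2cmdline, which applies Windows command-line quoting rules. A small illustration; the path is invented:]

    import subprocess

    args = ["--currentFile", "C:/Program Files/scene.scn", "--chunk", "*CHUNK*"]

    print(" ".join(args))
    # --currentFile C:/Program Files/scene.scn --chunk *CHUNK*   (unquoted space breaks the path)

    print(subprocess.list2cmdline(args))
    # --currentFile "C:/Program Files/scene.scn" --chunk *CHUNK*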
1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py index a8fc7f322e..62cebf99ed 100644 --- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py +++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py @@ -1,6 +1,7 @@ import os import shutil import winreg +import subprocess from openpype.lib import PreLaunchHook, get_openpype_execute_args from openpype.hosts.celaction import scripts @@ -13,7 +14,6 @@ class CelactionPrelaunchHook(PreLaunchHook): """ Bootstrap celacion with pype """ - workfile_ext = "scn" app_groups = ["celaction"] platforms = ["windows"] @@ -39,28 +39,28 @@ class CelactionPrelaunchHook(PreLaunchHook): path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py") subproces_args = get_openpype_execute_args("run", path_to_cli) - openpype_executables = subproces_args.pop(0) + openpype_executable = subproces_args.pop(0) winreg.SetValueEx( hKey, "SubmitAppTitle", 0, winreg.REG_SZ, - openpype_executables + openpype_executable ) parameters = subproces_args + [ - "--currentFile *SCENE*", - "--chunk *CHUNK*", - "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*" + "--currentFile", "*SCENE*", + "--chunk", "*CHUNK*", + "--frameStart", "*START*", + "--frameEnd", "*END*", + "--resolutionWidth", "*X*", + "--resolutionHeight", "*Y*" ] winreg.SetValueEx( hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters) + subprocess.list2cmdline(parameters) ) # setting resolution parameters From a2abcd252471c3cce2f56e0441043693df12bc0f Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 09:32:14 +0000 Subject: [PATCH 140/202] Optional viewport refresh on pointcache extraction --- .../hosts/maya/plugins/create/create_pointcache.py | 1 + .../hosts/maya/plugins/publish/extract_pointcache.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index ab8fe12079..cdec140ea8 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -28,6 +28,7 @@ class CreatePointCache(plugin.Creator): self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups self.data["worldSpace"] = True # Default to exporting world-space + self.data["refresh"] = False # Default to suspend refresh. 
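# Editorial note, not part of the patch: the "refresh" flag added above is
# read by the pointcache extractor further down in this patch to decide
# whether viewport refresh stays suspended during the Alembic export.
# A minimal sketch of that suspension pattern, assuming Maya's
# cmds.refresh(suspend=...) flag rather than the project's own
# suspended_refresh helper (the function name here is illustrative):
from contextlib import contextmanager
from maya import cmds


@contextmanager
def optionally_suspended_refresh(suspend=True):
    # Pause viewport redraws during the export when requested and always
    # restore normal refreshing afterwards, even if the export raises.
    if suspend:
        cmds.refresh(suspend=True)
    try:
        yield
    finally:
        if suspend:
            cmds.refresh(suspend=False)

# Parameterizing the context like this is one way to avoid duplicating the
# extract_alembic call across the if/else branches seen in the extractor.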
# Add options for custom attributes self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 7c1c6d5c12..5f5532e60a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,13 +86,21 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - with suspended_refresh(): + if instance.data.get("refresh", False): with maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, **options) + else: + with suspended_refresh(): + with maintained_selection(): + cmds.select(nodes, noExpand=True) + extract_alembic(file=path, + startFrame=start, + endFrame=end, + **options) if "representations" not in instance.data: instance.data["representations"] = [] From 9e2f3ab8685dea532a4b452da0ec6e0ef5ee56da Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 09:56:22 +0000 Subject: [PATCH 141/202] Disable viewport Pan/Zoom on playblast extraction. --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index b19d24fad7..04e3c7bccf 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -115,6 +115,10 @@ class ExtractPlayblast(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -135,6 +139,8 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) From 34d2f1252601a3fed9285a0b72b2ad759e463623 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:04:19 +0000 Subject: [PATCH 142/202] Hound --- openpype/hosts/maya/plugins/publish/extract_playblast.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index 04e3c7bccf..1f9f9db99a 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -118,7 +118,7 @@ class ExtractPlayblast(publish.Extractor): # Disable Pan/Zoom. 
pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) - + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -140,7 +140,7 @@ class ExtractPlayblast(publish.Extractor): path = capture.capture(log=self.log, **preset) cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) - + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) From 49ad1dbc97a6a1ea79f8ff273c5f275d4cd84282 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:12:02 +0000 Subject: [PATCH 143/202] Include thumbnail extraction --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2be..06244cf003 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -117,6 +117,10 @@ class ExtractThumbnail(publish.Extractor): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): # Force viewer to False in call to capture because we have our own # viewer opening call to allow a signal to trigger between @@ -136,6 +140,7 @@ class ExtractThumbnail(publish.Extractor): _, thumbnail = os.path.split(playblast) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) self.log.info("file list {}".format(thumbnail)) From 9aeb7898527d9d2892394402462251a1b89dfe87 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Tue, 29 Nov 2022 13:04:29 +0700 Subject: [PATCH 144/202] bugfix: Use unused 'paths' list --- openpype/tools/standalonepublish/widgets/widget_drop_frame.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index f8a8273b26..18c2b27678 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -178,7 +178,7 @@ class DropDataFrame(QtWidgets.QFrame): paths = self._get_all_paths(in_paths) collectionable_paths = [] non_collectionable_paths = [] - for path in in_paths: + for path in paths: ext = os.path.splitext(path)[1] if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) From decc8df4aef6eb1aef8e55152c3eea0760d1fad2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 29 Nov 2022 17:29:58 +0100 Subject: [PATCH 145/202] :construction: 3dsmax addon basics --- openpype/hosts/3dsmax/api/__init__.py | 0 openpype/hosts/3dsmax/plugins/__init__.py | 0 openpype/hosts/3dsmax/startup/startup.py | 2 - openpype/hosts/max/__init__.py | 10 ++ openpype/hosts/max/addon.py | 16 ++ openpype/hosts/max/api/__init__.py | 13 ++ openpype/hosts/max/api/lib.py | 2 + openpype/hosts/max/api/menu.py | 80 +++++++++ openpype/hosts/max/api/pipeline.py | 153 ++++++++++++++++++ openpype/hosts/max/hooks/set_paths.py | 17 ++ .../hosts/{3dsmax => max/plugins}/__init__.py | 0 .../hosts/{3dsmax => max}/startup/startup.ms | 0 openpype/hosts/max/startup/startup.py | 7 + openpype/settings/entities/enum_entity.py | 2 +- 14 files changed, 299 insertions(+), 3 deletions(-) delete 
mode 100644 openpype/hosts/3dsmax/api/__init__.py delete mode 100644 openpype/hosts/3dsmax/plugins/__init__.py delete mode 100644 openpype/hosts/3dsmax/startup/startup.py create mode 100644 openpype/hosts/max/__init__.py create mode 100644 openpype/hosts/max/addon.py create mode 100644 openpype/hosts/max/api/__init__.py create mode 100644 openpype/hosts/max/api/lib.py create mode 100644 openpype/hosts/max/api/menu.py create mode 100644 openpype/hosts/max/api/pipeline.py create mode 100644 openpype/hosts/max/hooks/set_paths.py rename openpype/hosts/{3dsmax => max/plugins}/__init__.py (100%) rename openpype/hosts/{3dsmax => max}/startup/startup.ms (100%) create mode 100644 openpype/hosts/max/startup/startup.py diff --git a/openpype/hosts/3dsmax/api/__init__.py b/openpype/hosts/3dsmax/api/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/3dsmax/plugins/__init__.py b/openpype/hosts/3dsmax/plugins/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/openpype/hosts/3dsmax/startup/startup.py b/openpype/hosts/3dsmax/startup/startup.py deleted file mode 100644 index dd8c08a6b9..0000000000 --- a/openpype/hosts/3dsmax/startup/startup.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -print("inside python startup") \ No newline at end of file diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py new file mode 100644 index 0000000000..8da0e0ee42 --- /dev/null +++ b/openpype/hosts/max/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + MaxAddon, + MAX_HOST_DIR, +) + + +__all__ = ( + "MaxAddon", + "MAX_HOST_DIR", +) \ No newline at end of file diff --git a/openpype/hosts/max/addon.py b/openpype/hosts/max/addon.py new file mode 100644 index 0000000000..734b87dd21 --- /dev/null +++ b/openpype/hosts/max/addon.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +import os +from openpype.modules import OpenPypeModule, IHostAddon + +MAX_HOST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +class MaxAddon(OpenPypeModule, IHostAddon): + name = "max" + host_name = "max" + + def initialize(self, module_settings): + self.enabled = True + + def get_workfile_extensions(self): + return [".max"] diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py new file mode 100644 index 0000000000..b6998df862 --- /dev/null +++ b/openpype/hosts/max/api/__init__.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +"""Public API for 3dsmax""" + +from .pipeline import ( + MaxHost +) +from .menu import OpenPypeMenu + + +__all__ = [ + "MaxHost", + "OpenPypeMenu" +] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py new file mode 100644 index 0000000000..e50de85f68 --- /dev/null +++ b/openpype/hosts/max/api/lib.py @@ -0,0 +1,2 @@ +def imprint(attr, data): + ... 
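The `imprint` helper added above is only a stub at this point in the series. As a purely illustrative sketch (not OpenPype's actual implementation), such a helper could JSON-serialize the publish data into the string parameter that `pipeline.py` below creates on the scene root; the `scene_root` argument and the `read` counterpart are assumptions made for the example:

```python
import json


def imprint(scene_root, data):
    # Hypothetical sketch: store the dict as JSON in the string parameter of
    # the "OpenPypeContext" custom attribute created in pipeline.py below.
    scene_root.OpenPypeContext.context = json.dumps(data)


def read(scene_root):
    # Return whatever imprint() stored, or an empty dict when nothing is set.
    raw = scene_root.OpenPypeContext.context or ""
    return json.loads(raw) if raw else {}
```

Whether the real helpers end up receiving the attribute holder or the raw string value is settled in later commits; the sketch only illustrates the JSON round-trip.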
diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py
new file mode 100644
index 0000000000..13ca503b4d
--- /dev/null
+++ b/openpype/hosts/max/api/menu.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+"""3dsmax menu definition of OpenPype."""
+from abc import ABCMeta, abstractmethod
+import six
+from Qt import QtWidgets, QtCore
+from pymxs import runtime as rt
+
+from openpype.tools.utils import host_tools
+
+
+@six.add_metaclass(ABCMeta)
+class OpenPypeMenu(object):
+
+    def __init__(self):
+        self.main_widget = self.get_main_widget()
+
+    @staticmethod
+    def get_main_widget():
+        """Get 3dsmax main window."""
+        return QtWidgets.QWidget.find(rt.windows.getMAXHWND())
+
+    def get_main_menubar(self):
+        """Get main Menubar by 3dsmax main window."""
+        return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0]
+
+    def get_or_create_openpype_menu(self, name="&OpenPype", before="&Help"):
+        menu_bar = self.get_main_menubar()
+        menu_items = menu_bar.findChildren(
+            QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly)
+        help_action = None
+        for item in menu_items:
+            if name in item.title():
+                # we already have OpenPype menu
+                return item
+
+            if before in item.title():
+                help_action = item.menuAction()
+
+        op_menu = QtWidgets.QMenu("&OpenPype")
+        menu_bar.insertMenu(help_action, op_menu)
+        return op_menu
+
+    def build_openpype_menu(self):
+        openpype_menu = self.get_or_create_openpype_menu()
+        load_action = QtWidgets.QAction("Load...", openpype_menu)
+        load_action.triggered.connect(self.load_callback)
+        openpype_menu.addAction(load_action)
+
+        publish_action = QtWidgets.QAction("Publish...", openpype_menu)
+        publish_action.triggered.connect(self.publish_callback)
+        openpype_menu.addAction(publish_action)
+
+        manage_action = QtWidgets.QAction("Manage...", openpype_menu)
+        manage_action.triggered.connect(self.manage_callback)
+        openpype_menu.addAction(manage_action)
+
+        library_action = QtWidgets.QAction("Library...", openpype_menu)
+        library_action.triggered.connect(self.library_callback)
+        openpype_menu.addAction(library_action)
+
+        openpype_menu.addSeparator()
+
+        workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu)
+        workfiles_action.triggered.connect(self.workfiles_callback)
+        openpype_menu.addAction(workfiles_action)
+
+    def load_callback(self):
+        host_tools.show_loader(parent=self.main_widget)
+
+    def publish_callback(self):
+        host_tools.show_publisher(parent=self.main_widget)
+
+    def manage_callback(self):
+        host_tools.show_subset_manager(parent=self.main_widget)
+
+    def library_callback(self):
+        host_tools.show_library_loader(parent=self.main_widget)
+
+    def workfiles_callback(self):
+        host_tools.show_workfiles(parent=self.main_widget)
diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py
new file mode 100644
index 0000000000..2ee5989871
--- /dev/null
+++ b/openpype/hosts/max/api/pipeline.py
@@ -0,0 +1,153 @@
+# -*- coding: utf-8 -*-
+"""Pipeline tools for OpenPype 3dsmax integration."""
+import os
+import sys
+import logging
+import contextlib
+
+from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher
+import pyblish.api
+from openpype.pipeline import (
+    register_creator_plugin_path,
+    register_loader_plugin_path,
+    AVALON_CONTAINER_ID,
+)
+from openpype.hosts.max.api import OpenPypeMenu
+from openpype.hosts.max.api import lib
+from openpype.hosts.max import MAX_HOST_DIR
+from openpype.pipeline.load import any_outdated_containers
+from openpype.lib import (
+    register_event_callback,
+    emit_event,
+)
+from pymxs import runtime as rt  # noqa
+
+log = logging.getLogger("openpype.hosts.max")
+
+PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins")
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
+CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
+INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
+
+
+class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher):
+    name = "max"
+    menu = None
+
+    def __init__(self):
+        super(MaxHost, self).__init__()
+        self._op_events = {}
+        self._has_been_setup = False
+
+    def install(self):
+        pyblish.api.register_host("max")
+
+        pyblish.api.register_plugin_path(PUBLISH_PATH)
+        register_loader_plugin_path(LOAD_PATH)
+        register_creator_plugin_path(CREATE_PATH)
+        log.info("Building menu ...")
+
+        self.menu = OpenPypeMenu()
+
+        log.info("Installing callbacks ... ")
+        # register_event_callback("init", on_init)
+        self._register_callbacks()
+
+        # register_event_callback("before.save", before_save)
+        # register_event_callback("save", on_save)
+        # register_event_callback("open", on_open)
+        # register_event_callback("new", on_new)
+
+        # pyblish.api.register_callback(
+        #     "instanceToggled", on_pyblish_instance_toggled
+        # )
+
+        self._has_been_setup = True
+
+    def has_unsaved_changes(self):
+        # TODO: how to get it from 3dsmax?
+        return True
+
+    def get_workfile_extensions(self):
+        return [".max"]
+
+    def save_workfile(self, dst_path=None):
+        rt.saveMaxFile(dst_path)
+        return dst_path
+
+    def open_workfile(self, filepath):
+        rt.checkForSave()
+        rt.loadMaxFile(filepath)
+        return filepath
+
+    def get_current_workfile(self):
+        return os.path.join(rt.maxFilePath, rt.maxFileName)
+
+    def get_containers(self):
+        return ls()
+
+    def _register_callbacks(self):
+        for event in self._op_events.copy().values():
+            if event is None:
+                continue
+
+            try:
+                rt.callbacks.removeScript(id=rt.name(event.name))
+            except RuntimeError as e:
+                log.info(e)
+
+            rt.callbacks.addScript(
+                event.name, event.callback, id=rt.Name('OpenPype'))
+
+    @staticmethod
+    def create_context_node():
+        """Helper for creating context holding node."""
+
+        root_scene = rt.rootScene
+
+        create_attr_script = ("""
+attributes "OpenPypeContext"
+(
+    parameters main rollout:params
+    (
+        context type: #string
+    )
+
+    rollout params "OpenPype Parameters"
+    (
+        editText editTextContext "Context" type: #string
+    )
+)
+    """)
+
+        attr = rt.execute(create_attr_script)
+        rt.custAttributes.add(root_scene, attr)
+
+        return root_scene.OpenPypeContext.context
+
+    def update_context_data(self, data, changes):
+        try:
+            context = rt.rootScene.OpenPypeContext.context
+        except AttributeError:
+            # context node doesn't exist
+            context = self.create_context_node()
+
+        lib.imprint(context, data)
+
+    def get_context_data(self):
+        try:
+            context = rt.rootScene.OpenPypeContext.context
+        except AttributeError:
+            # context node doesn't exist
+            context = self.create_context_node()
+        return lib.read(context)
+
+    def save_file(self, dst_path=None):
+        # Force forwards slashes to avoid segfault
+        dst_path = dst_path.replace("\\", "/")
+        rt.saveMaxFile(dst_path)
+
+
+def ls():
+    ...
\ No newline at end of file
diff --git a/openpype/hosts/max/hooks/set_paths.py b/openpype/hosts/max/hooks/set_paths.py
new file mode 100644
index 0000000000..3db5306344
--- /dev/null
+++ b/openpype/hosts/max/hooks/set_paths.py
@@ -0,0 +1,17 @@
+from openpype.lib import PreLaunchHook
+
+
+class SetPath(PreLaunchHook):
+    """Set current dir to workdir.
+ + Hook `GlobalHostDataHook` must be executed before this hook. + """ + app_groups = ["max"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + self.launch_context.kwargs["cwd"] = workdir diff --git a/openpype/hosts/3dsmax/__init__.py b/openpype/hosts/max/plugins/__init__.py similarity index 100% rename from openpype/hosts/3dsmax/__init__.py rename to openpype/hosts/max/plugins/__init__.py diff --git a/openpype/hosts/3dsmax/startup/startup.ms b/openpype/hosts/max/startup/startup.ms similarity index 100% rename from openpype/hosts/3dsmax/startup/startup.ms rename to openpype/hosts/max/startup/startup.ms diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py new file mode 100644 index 0000000000..afcbd2d132 --- /dev/null +++ b/openpype/hosts/max/startup/startup.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +from openpype.hosts.max.api import MaxHost +from openpype.pipeline import install_host + +host = MaxHost() +install_host(host) + diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index c07350ba07..c0c103ea10 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -152,7 +152,7 @@ class HostsEnumEntity(BaseEnumEntity): schema_types = ["hosts-enum"] all_host_names = [ - "3dsmax", + "max", "aftereffects", "blender", "celaction", From 2e4db127569edce09336d84fcc4835954fbe6ce6 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 30 Nov 2022 03:32:24 +0000 Subject: [PATCH 146/202] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index bf9f97d5f4..9a34c85bf8 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.1" +__version__ = "3.14.8-nightly.2" From 29b9603aab1b14e54026f23965281a1fda7d53a8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:26:42 +0100 Subject: [PATCH 147/202] change start_number if input is sequence instead of adding -ss --- openpype/plugins/publish/extract_review.py | 57 +++++++++++++--------- 1 file changed, 33 insertions(+), 24 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index f299d1c6e9..af49f7d79b 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -598,9 +598,13 @@ class ExtractReview(pyblish.api.InstancePlugin): if temp_data["input_is_sequence"]: # Set start frame of input sequence (just frame in filename) # - definition of input filepath - ffmpeg_input_args.extend([ - "-start_number", str(temp_data["first_sequence_frame"]) - ]) + # - add handle start if output should be without handles + start_number = temp_data["first_sequence_frame"] + if temp_data["without_handles"] and temp_data["handles_are_set"]: + start_number += temp_data["handle_start"] + ffmpeg_input_args.extend( + ["-start_number", str(start_number)] + ) # TODO add fps mapping `{fps: fraction}` ? 
# - e.g.: { @@ -609,49 +613,54 @@ class ExtractReview(pyblish.api.InstancePlugin): # "23.976": "24000/1001" # } # Add framerate to input when input is sequence - ffmpeg_input_args.append( - "-framerate {}".format(temp_data["fps"]) - ) + ffmpeg_input_args.extend([ + "-framerate", str(temp_data["fps"]) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) # - this is definition of an output - ffmpeg_output_args.append( - "-start_number {}".format(temp_data["output_frame_start"]) - ) + ffmpeg_output_args.extend([ + "-start_number", str(temp_data["output_frame_start"]) + ]) # Change output's duration and start point if should not contain # handles - start_sec = 0 if temp_data["without_handles"] and temp_data["handles_are_set"]: - # Set start time without handles - # - check if handle_start is bigger than 0 to avoid zero division - if temp_data["handle_start"] > 0: + # Add -ss (start offset in seconds) if input is not sequence + if not temp_data["input_is_sequence"]: start_sec = float(temp_data["handle_start"]) / temp_data["fps"] - ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec)) + # Set start time without handles + # - Skip if start sec is 0.0 + if start_sec > 0.0: + ffmpeg_input_args.extend([ + "-ss", "{:0.10f}".format(start_sec) + ]) # Set output duration inn seconds - ffmpeg_output_args.append("-t {:0.10}".format(duration_seconds)) + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: - ffmpeg_output_args.append("-frames:v {}".format(output_frames_len)) + ffmpeg_output_args.extend([ + "-frames:v", str(output_frames_len) + ]) # Add duration of an input sequence if output is video if ( temp_data["input_is_sequence"] and not temp_data["output_is_sequence"] ): - ffmpeg_input_args.append("-to {:0.10f}".format( - duration_seconds + start_sec - )) + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) # Add video/image input path - ffmpeg_input_args.append( - "-i {}".format( - path_to_subprocess_arg(temp_data["full_input_path"]) - ) - ) + ffmpeg_input_args.extend([ + "-i", path_to_subprocess_arg(temp_data["full_input_path"]) + ]) # Add audio arguments if there are any. Skipped when output are images. if not temp_data["output_ext_is_image"] and temp_data["with_audio"]: From f128425155e0144378882cc0b71d5444d82c9f44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Je=C5=BEek?= Date: Wed, 30 Nov 2022 17:27:12 +0100 Subject: [PATCH 148/202] Update openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../deadline/plugins/publish/submit_celaction_deadline.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index 7913851d8a..ea44a24459 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -197,9 +197,9 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): response = requests.post(self.deadline_url, json=payload) if not response.ok: - self.log.error("Submission failed!") - self.log.error(response.status_code) - self.log.error(response.content) + self.log.error( + "Submission failed! 
[{}] {}".format( + response.status_code, response.content)) self.log.debug(payload) raise SystemExit(response.text) From 7a90f8f084b8f0d242564584b4df296106c997f9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 30 Nov 2022 17:33:47 +0100 Subject: [PATCH 149/202] celaction: shifting argparse to publish plugin --- .../publish/collect_celaction_cli_kwargs.py | 18 +++++++++++-- openpype/hosts/celaction/scripts/__init__.py | 1 - .../hosts/celaction/scripts/publish_cli.py | 27 ------------------- 3 files changed, 16 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py index e552e9ba6a..bf97dd744b 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py +++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py @@ -1,5 +1,6 @@ import pyblish.api -from openpype.hosts.celaction import scripts +import argparse +import sys from pprint import pformat @@ -10,7 +11,20 @@ class CollectCelactionCliKwargs(pyblish.api.Collector): order = pyblish.api.Collector.order - 0.1 def process(self, context): - passing_kwargs = scripts.PASSING_KWARGS.copy() + parser = argparse.ArgumentParser(prog="celaction") + parser.add_argument("--currentFile", + help="Pass file to Context as `currentFile`") + parser.add_argument("--chunk", + help=("Render chanks on farm")) + parser.add_argument("--frameStart", + help=("Start of frame range")) + parser.add_argument("--frameEnd", + help=("End of frame range")) + parser.add_argument("--resolutionWidth", + help=("Width of resolution")) + parser.add_argument("--resolutionHeight", + help=("Height of resolution")) + passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__ self.log.info("Storing kwargs ...") self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs))) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py index dfd9b37ae2..e69de29bb2 100644 --- a/openpype/hosts/celaction/scripts/__init__.py +++ b/openpype/hosts/celaction/scripts/__init__.py @@ -1 +0,0 @@ -PASSING_KWARGS = None diff --git a/openpype/hosts/celaction/scripts/publish_cli.py b/openpype/hosts/celaction/scripts/publish_cli.py index 586880dc4c..39d3f1a94d 100644 --- a/openpype/hosts/celaction/scripts/publish_cli.py +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -1,13 +1,11 @@ import os import sys -import argparse import pyblish.api import pyblish.util import openpype.hosts.celaction from openpype.lib import Logger -from openpype.hosts.celaction import scripts from openpype.tools.utils import host_tools from openpype.pipeline import install_openpype_plugins @@ -20,30 +18,6 @@ PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -def cli(): - parser = argparse.ArgumentParser(prog="celaction_publish") - - parser.add_argument("--currentFile", - help="Pass file to Context as `currentFile`") - - parser.add_argument("--chunk", - help=("Render chanks on farm")) - - parser.add_argument("--frameStart", - help=("Start of frame range")) - - parser.add_argument("--frameEnd", - help=("End of frame range")) - - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - - scripts.PASSING_KWARGS = parser.parse_args(sys.argv[1:]).__dict__ - - def main(): # Registers pype's Global pyblish plugins install_openpype_plugins() @@ -59,6 
+33,5 @@ def main(): if __name__ == "__main__": - cli() result = main() sys.exit(not bool(result)) From 63eae39de5b26d63b4312c7e57ad8bb5ad5767c7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:37:14 +0100 Subject: [PATCH 150/202] moved few lines to make it more logical --- openpype/plugins/publish/extract_review.py | 24 +++++++++------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index af49f7d79b..61f162dfcc 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -616,6 +616,11 @@ class ExtractReview(pyblish.api.InstancePlugin): ffmpeg_input_args.extend([ "-framerate", str(temp_data["fps"]) ]) + # Add duration of an input sequence if output is video + if not temp_data["output_is_sequence"]: + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) @@ -627,6 +632,11 @@ class ExtractReview(pyblish.api.InstancePlugin): # Change output's duration and start point if should not contain # handles if temp_data["without_handles"] and temp_data["handles_are_set"]: + # Set output duration in seconds + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) + # Add -ss (start offset in seconds) if input is not sequence if not temp_data["input_is_sequence"]: start_sec = float(temp_data["handle_start"]) / temp_data["fps"] @@ -637,26 +647,12 @@ class ExtractReview(pyblish.api.InstancePlugin): "-ss", "{:0.10f}".format(start_sec) ]) - # Set output duration inn seconds - ffmpeg_output_args.extend([ - "-t", "{:0.10}".format(duration_seconds) - ]) - # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: ffmpeg_output_args.extend([ "-frames:v", str(output_frames_len) ]) - # Add duration of an input sequence if output is video - if ( - temp_data["input_is_sequence"] - and not temp_data["output_is_sequence"] - ): - ffmpeg_input_args.extend([ - "-to", "{:0.10f}".format(duration_seconds) - ]) - # Add video/image input path ffmpeg_input_args.extend([ "-i", path_to_subprocess_arg(temp_data["full_input_path"]) From 42588daab5033e5cdd7a0e9c3bbd9d626550c24d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 30 Nov 2022 17:38:16 +0100 Subject: [PATCH 151/202] unify formatting --- openpype/plugins/publish/extract_review.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 61f162dfcc..9310923a9f 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -602,9 +602,9 @@ class ExtractReview(pyblish.api.InstancePlugin): start_number = temp_data["first_sequence_frame"] if temp_data["without_handles"] and temp_data["handles_are_set"]: start_number += temp_data["handle_start"] - ffmpeg_input_args.extend( - ["-start_number", str(start_number)] - ) + ffmpeg_input_args.extend([ + "-start_number", str(start_number) + ]) # TODO add fps mapping `{fps: fraction}` ? 
# - e.g.: { From 7544771744427522841e580ac1cee4945b6d07d4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Dec 2022 13:29:18 +0100 Subject: [PATCH 152/202] replace reset with configure locations --- .../modules/ftrack/plugins/publish/integrate_ftrack_api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 231bd8e81e..0e8209866f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -46,8 +46,9 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin): return session = context.data["ftrackSession"] - # Reset session and reconfigure locations - session.reset() + # Reset session operations and reconfigure locations + session.recorded_operations.clear() + session._configure_locations() try: self.integrate_to_ftrack( From 4acbb9fa1823aec8adcec1e25e69c48a49a51979 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 1 Dec 2022 14:19:30 +0100 Subject: [PATCH 153/202] general: integrate skips transfere files in src == dst --- openpype/plugins/publish/integrate.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788..7e4fc84658 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -291,6 +291,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: + if src == dst: + continue + # todo: add support for hardlink transfers file_transactions.add(src, dst) From 15fa6f6f18b00659351c133b5db0bf342c5a0035 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Dec 2022 14:53:00 +0100 Subject: [PATCH 154/202] fix occational double parents issue --- openpype/modules/ftrack/lib/avalon_sync.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 935d1e85c9..0341c25717 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -1556,7 +1556,7 @@ class SyncEntitiesFactory: deleted_entities.append(mongo_id) av_ent = self.avalon_ents_by_id[mongo_id] - av_ent_path_items = [p for p in av_ent["data"]["parents"]] + av_ent_path_items = list(av_ent["data"]["parents"]) av_ent_path_items.append(av_ent["name"]) self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items))) @@ -1855,7 +1855,7 @@ class SyncEntitiesFactory: _vis_par = _avalon_ent["data"]["visualParent"] _name = _avalon_ent["name"] if _name in self.all_ftrack_names: - av_ent_path_items = _avalon_ent["data"]["parents"] + av_ent_path_items = list(_avalon_ent["data"]["parents"]) av_ent_path_items.append(_name) av_ent_path = "/".join(av_ent_path_items) # TODO report @@ -1997,7 +1997,7 @@ class SyncEntitiesFactory: {"_id": mongo_id}, item )) - av_ent_path_items = item["data"]["parents"] + av_ent_path_items = list(item["data"]["parents"]) av_ent_path_items.append(item["name"]) av_ent_path = "/".join(av_ent_path_items) self.log.debug( @@ -2110,6 +2110,7 @@ class SyncEntitiesFactory: entity_dict = self.entities_dict[ftrack_id] + final_parents = entity_dict["final_entity"]["data"]["parents"] if archived_by_id: # if is changeable then unarchive (nothing to check here) if self.changeability_by_mongo_id[mongo_id]: @@ -2123,10 +2124,8 @@ class 
SyncEntitiesFactory: archived_name = archived_by_id["name"] if ( - archived_name != entity_dict["name"] or - archived_parents != entity_dict["final_entity"]["data"][ - "parents" - ] + archived_name != entity_dict["name"] + or archived_parents != final_parents ): return None @@ -2136,11 +2135,7 @@ class SyncEntitiesFactory: for archived in archived_by_name: mongo_id = str(archived["_id"]) archived_parents = archived.get("data", {}).get("parents") - if ( - archived_parents == entity_dict["final_entity"]["data"][ - "parents" - ] - ): + if archived_parents == final_parents: return mongo_id # Secondly try to find more close to current ftrack entity @@ -2350,8 +2345,7 @@ class SyncEntitiesFactory: continue changed = True - parents = [par for par in _parents] - hierarchy = "/".join(parents) + parents = list(_parents) self.entities_dict[ftrack_id][ "final_entity"]["data"]["parents"] = parents From 11a2ce396b9afec2036668cd69cd1658efee004c Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 2 Dec 2022 10:28:14 +0000 Subject: [PATCH 155/202] Consistent Python version --- website/docs/dev_requirements.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 1c8958d1c0..fa2d996e20 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -55,7 +55,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python -**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). +**Python 3.7.9** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. From 73393a75b7e33c5dca88dacc4d8f05634da034f3 Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Fri, 2 Dec 2022 10:47:06 +0000 Subject: [PATCH 156/202] Note about unrestricted execution on Windows. --- website/docs/dev_build.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 4e80f6e19d..9c99b26f1e 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -51,7 +51,9 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v #### Run from source -For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. +For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. To run the powershell scripts you may have to enable unrestricted execution as administrator: + +`Set-ExecutionPolicy -ExecutionPolicy unrestricted` To start OpenPype from source you need to From a465315f034d9e297a27bdda9a9f37a52b21bc97 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 12:08:59 +0000 Subject: [PATCH 157/202] Add optional keyword to suspend_refresh. 
--- openpype/hosts/maya/api/lib.py | 8 ++++---- .../plugins/publish/extract_pointcache.py | 20 +++++++------------ 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2530021eba..b2bbb823aa 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -127,14 +127,14 @@ def get_main_window(): @contextlib.contextmanager -def suspended_refresh(): +def suspended_refresh(suspend=True): """Suspend viewport refreshes""" - + original_state = cmds.refresh(query=True, suspend=True) try: - cmds.refresh(suspend=True) + cmds.refresh(suspend=suspend) yield finally: - cmds.refresh(suspend=False) + cmds.refresh(suspend=original_state) @contextlib.contextmanager diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 5f5532e60a..23b76a48c2 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,21 +86,15 @@ class ExtractAlembic(publish.Extractor): start=start, end=end)) - if instance.data.get("refresh", False): + with suspended_refresh(suspend=instance.data.get("refresh", False)): with maintained_selection(): cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) - else: - with suspended_refresh(): - with maintained_selection(): - cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) + extract_alembic( + file=path, + startFrame=start, + endFrame=end, + **options + ) if "representations" not in instance.data: instance.data["representations"] = [] From b14a0718d274522fe269a10237e628f6110437af Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 14:15:59 +0100 Subject: [PATCH 158/202] update change log and history for release --- CHANGELOG.md | 22 ++++++++++++++++++++++ HISTORY.md | 20 ++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0c5f2cf8b5..3cca692b68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + + ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) [Full 
Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) diff --git a/HISTORY.md b/HISTORY.md index 04a1073c07..f4e132488b 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,25 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) ## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) From 6078a5746feb875c53d6d37274a52097b447bcd0 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:26:03 +0000 Subject: [PATCH 159/202] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 9a34c85bf8..6903ab4d10 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.2" +__version__ = "3.14.8-nightly.3" From 2195cefe8a76e42b93eab2cf5195e9cb30ca79bd Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:32:51 +0000 Subject: [PATCH 160/202] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 6903ab4d10..b27b98e2fa 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.3" +__version__ = "3.14.8-nightly.4" From 42984c54667ea1f27229368cbc5fc00e425a5575 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Fri, 2 Dec 2022 13:34:53 +0000 Subject: [PATCH 161/202] [Automated] Release --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index b27b98e2fa..fc687a1263 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8-nightly.4" +__version__ = "3.14.8" From 412d03d382aef31769f3d6f61e6ec70fa53fda6d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 15:52:04 +0100 Subject: [PATCH 162/202] Merge fixes for tests from branch with Deadline tests. Branch with newly implemented DL tests is targetted to release 3.15, but changes from it affects tests in develop. 
This branch should solve issues with automatic tests in develop without need of full release 3.15 changes. --- tests/README.md | 10 +++ tests/conftest.py | 12 +++ tests/integration/hosts/aftereffects/lib.py | 22 ++++-- .../test_publish_in_aftereffects.py | 34 ++++++--- ...test_publish_in_aftereffects_multiframe.py | 36 ++++++--- tests/integration/hosts/maya/lib.py | 19 ++++- .../hosts/maya/test_publish_in_maya.py | 57 ++++++++------ tests/integration/hosts/nuke/lib.py | 31 ++++++-- .../hosts/nuke/test_publish_in_nuke.py | 25 +++++-- tests/integration/hosts/photoshop/lib.py | 11 ++- .../photoshop/test_publish_in_photoshop.py | 6 +- tests/lib/db_handler.py | 23 +++--- tests/lib/testing_classes.py | 70 ++++++++++++++---- tests/resources/test_data.zip | Bin 7350 -> 5098 bytes tests/unit/igniter/test_bootstrap_repos.py | 30 ++++---- 15 files changed, 275 insertions(+), 111 deletions(-) diff --git a/tests/README.md b/tests/README.md index 69828cdbc2..d36b6534f8 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,5 +1,15 @@ Automatic tests for OpenPype ============================ + +Requirements: +============ +Tests are recreating fresh DB for each run, so `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on Path. + +You can find intallers here: https://www.mongodb.com/docs/database-tools/installation/installation/ + +You can test that `mongorestore` is available by running this in console, or cmd: +```mongorestore --version``` + Structure: - integration - end to end tests, slow (see README.md in the integration folder for more info) - openpype/modules/MODULE_NAME - structure follow directory structure in code base diff --git a/tests/conftest.py b/tests/conftest.py index aa850be1a6..7b58b0314d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,3 +43,15 @@ def app_variant(request): @pytest.fixture(scope="module") def timeout(request): return request.config.getoption("--timeout") + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item, call): + # execute all other hooks to obtain the report object + outcome = yield + rep = outcome.get_result() + + # set a report attribute for each phase of a call, which can + # be "setup", "call", "teardown" + + setattr(item, "rep_" + rep.when, rep) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 9fffc6073d..ffad33d13c 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class AfterEffectsTestClass(HostFixtures): +class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -15,15 +18,15 @@ class AfterEffectsTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, + "test_project_test_asset_test_task_v001.aep") + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") shutil.copy(src_path, dest_path) yield dest_path @@ -32,3 +35,12 @@ class AfterEffectsTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs", "Auto-Save"] + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 4925cbd2d7..57d5a3e3f1 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Uses generic TestCase to prepare fixtures for test data, testing DBs, @@ -32,10 +32,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -49,27 +49,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + 
assert not any(failures) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index c882e0f9b2..2d95eada99 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -1,15 +1,15 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects - Should publish 5 frames + Should publish 10 frames """ PERSIST = True @@ -19,10 +19,10 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -36,27 +36,41 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "h264_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + assert not any(failures) diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c065..ab402f36e0 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -15,7 +18,7 @@ class MayaTestClass(HostFixtures): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -23,7 +26,7 @@ class MayaTestClass(HostFixtures): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path @@ -39,3 +42,11 @@ class MayaTestClass(HostFixtures): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b0564428..b7ee228aae 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,8 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) @@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass): ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" @@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) - assert 11 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + 
"context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba81..baff675da7 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,17 +1,20 @@ import os import pytest -import shutil +import re -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -27,7 +30,16 @@ class NukeTestClass(HostFixtures): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? + replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path @@ -41,4 +53,11 @@ class NukeTestClass(HostFixtures): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b5..f84f13fa20 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,17 +1,25 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. 
Then checks content of DB (if subset, version, representations were @@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. @@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass): ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 50 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): @@ -52,7 +61,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -61,7 +70,7 @@ class TestPublishInNuke(NukeTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae6..9d51a11c06 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import os import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -32,3 +35,7 @@ class PhotoshopTestClass(HostFixtures): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e..4aaf43234d 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass): ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "") ] - APP = "photoshop" + APP_GROUP = "photoshop" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -72,7 +72,7 @@ class TestPublishInPhotoshop(PhotoshopTestClass): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b181055012..82e741cc3b 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ class DBHandler: "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): # noqa + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) @@ -133,7 +132,11 @@ class DBHandler: db_name=db_name, db_name_out=db_name_out, collection=collection) print("mongorestore query:: {}".format(query)) - subprocess.run(query) + try: + subprocess.run(query) + except FileNotFoundError: + raise RuntimeError("'mongorestore' utility must be on path." 
+ "Please install it.") def teardown(self, db_name): """Drops 'db_name' if exists.""" @@ -231,13 +234,15 @@ class DBHandler: # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f81095..5e3b11cfc9 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,9 +8,12 @@ import tempfile import shutil import glob import platform +import requests +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler +from openpype.modules import ModulesManager class BaseTest: @@ -36,9 +39,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] @@ -57,7 +60,7 @@ class ModuleUnitTest(BaseTest): m.undo() @pytest.fixture(scope="module") - def download_test_data(self, test_data_folder, persist=False): + def download_test_data(self, test_data_folder, persist, request): test_data_folder = test_data_folder or self.TEST_DATA_FOLDER if test_data_folder: print("Using existing folder {}".format(test_data_folder)) @@ -78,7 +81,8 @@ class ModuleUnitTest(BaseTest): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = (persist or self.PERSIST or + self.is_test_failed(request)) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -125,7 +129,8 @@ class ModuleUnitTest(BaseTest): monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data) @pytest.fixture(scope="module") - def db_setup(self, download_test_data, env_var, monkeypatch_session): + def db_setup(self, download_test_data, env_var, monkeypatch_session, + request): """Restore prepared MongoDB dumps into selected DB.""" backup_dir = os.path.join(download_test_data, "input", "dumps") @@ -135,13 +140,14 @@ class ModuleUnitTest(BaseTest): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.is_test_failed(request) + if not persist: 
db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -166,6 +172,13 @@ class ModuleUnitTest(BaseTest): mongo_client = OpenPypeMongoConnection.get_mongo_client() yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"] + def is_test_failed(self, request): + # if request.node doesn't have rep_call, something failed + try: + return request.node.rep_call.failed + except AttributeError: + return True + class PublishTest(ModuleUnitTest): """Test class for publishing in hosts. @@ -188,7 +201,7 @@ class PublishTest(ModuleUnitTest): TODO: implement test on file size, file content """ - APP = "" + APP_GROUP = "" TIMEOUT = 120 # publish timeout @@ -210,10 +223,10 @@ class PublishTest(ModuleUnitTest): if not app_variant: variant = ( application_manager.find_latest_available_variant_for_group( - self.APP)) + self.APP_GROUP)) app_variant = variant.name - yield "{}/{}".format(self.APP, app_variant) + yield "{}/{}".format(self.APP_GROUP, app_variant) @pytest.fixture(scope="module") def output_folder_url(self, download_test_data): @@ -310,7 +323,8 @@ class PublishTest(ModuleUnitTest): yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! @@ -328,12 +342,33 @@ class PublishTest(ModuleUnitTest): glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = self._filter_files(published, + skip_compare_folders) + + # filter out temp files also in expected + # could be polluted by accident by copying 'output' to zip file + filtered_expected = self._filter_files(expected, skip_compare_folders) + + not_mtched = filtered_expected.symmetric_difference(filtered_published) + if not_mtched: + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_mtched)))) + + def _filter_files(self, source_files, skip_compare_folders): + """Filter list of files according to regex pattern.""" + filtered = set() + for file_path in source_files: + if skip_compare_folders: + if not any([re.search(val, file_path) + for val in skip_compare_folders]): + filtered.add(file_path) + else: + filtered.add(file_path) + + return filtered -class HostFixtures(PublishTest): +class HostFixtures(): """Host specific fixtures. 
Should be implemented once per host.""" @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): @@ -344,3 +379,8 @@ class HostFixtures(PublishTest): def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" raise NotImplementedError + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + """Use list of regexs to filter out published folders from comparing""" + raise NotImplementedError diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index 0faab86b37d5c7d1224e8a92cca766ed80536718..e22b9acdbdbcd7312776e33918ac2a6a9211dab3 100644 GIT binary patch delta 3519 zcmaJ@2{=^iA0A|BtYb!D2xCNpu{L(enk)$;^`FuhnnK9lwM?=ljAbyiA|y-5(nu5& zl58c)E)0qxZOHPUp}N<-_qpGB&U3zVp7;Ho_xsNGp6{*J`XHNxt-uPkv+%I(fB;wl z06>j{BRUz6*l{r$hfYpY1~IRU$G@92Nq|E{4jX{=@bYx>bM^J}RkE?zB6Dq$ue@4( zY0C)!T!jDt>P#?U8<{iyXRI|*ryh6`hs_fO#nlge8!HvVqqXQ*32TX zI}#Eups@}}3MwtEeHc7gEyia1jDX4iP#*ZH!e&tTWKi9w$yXrUA|yn*!t(*@Fz?bxCih3_1kQRSZYdZ#IXIi{+P|_K*{XTLV`b8=^DbfJ1mcWt z%~{*PoF8lBRsQV>hgIcM2gjUs%e@~(xbxo&H1?2P=m1?Jq7f=mWX=W9!$+<;J8!E7 z?P)6@!ag1Z?MSgYPpbV-hN#0#w=g8ckeDc6wR=KU)lORUYoEQi3xR@SxYtm zZN>2#S43Valot{T76Cf8M?Y}vAS16Txb1Fd*7*>`Lc=KCDy5^jxMU5_kglJC&nG*D?72{$?u|2b#VG261bjOG$`QUx$4glo@FLh+7s{l%x2OXMD2V&wdJM?CLv` zojPuWcPh*dpC|k=+h05ej*|4_H~M%pV}?P)3={ z1b}Womty924Q4MgCOE9v7_)O&aHjbTfog1-B0<#pGEjW8Nwo-8^2%A{V!#pnAQoH7 z;gGK2y==z?uF`-e{ILX05YrCJs}7Dl5;qZhufMp(h?9`2)Q2h^OvP01s(+69OV%sIwAB_~voy`c6^98w6cRMUWe2%GAmu8y> zy{8;?5tWsD(1J(3etfji**Gf{eQzy8VoC5G>|lcxAt?r?(G;{=hEuQqUssskuIdY% z);zkA?dV>Q!;LgK@3?^sB<7BWE@z!oIWZ^a;U~Fh4CK+O|I#4vp_TWTM_vjIn-0gl z@*|J=_7#|X(CivE^{_FegkJk1IqRKGvpaS}cnRt`(lO%4HZSQb{yO8SjOB5$*QUA7 z^0JK_1ewpU+c)QKI$<0^PRP%taXxw?T|V%;KN@+5UR@JIzEoSi>luES>ylHRkRLl; z@Z+$m%E5YrJ6W-UjDq`7o*I z)@Y({0mied;||Y@V{fb);bxY{ZygBK4P3>ElI7W*PW;%YpqI_Ye1_Xt8>!xJP*^Ry z8@aD(dgc_3bF2>>s=M|mg}rv;1+g>{TE!^Of!m z1cBwUEMM@D`l@8r*|4KrfXB4~Z^;)A%(o5R@6mC&r2`D9h(5jl_Ea4ZVe;Zs6mHo% z%KhCJ3_A8mVdZjWrx+Kh+@-JkYM(#MIn3mdS2_8x`SzU~P-NAFSB@V};G6jPFwW@=kCWwlli?BIpot@>Tnz z`XA$}?p}5E7jKIXm(ZAAnw0J7{!S`D<)&uw?0ax8zrUNR{mQ~l+`J#@NkoGfq9`_RRCw?U^J2mL-@B7?^mDHggLb0Yku*QTchMMwD80Rb6F&!=)Ngu%GZICYpTjx82Rg#$`x zl!Qc%)mma6Tsl$a!=`cSX-0HooK&^?zSHWFwHD*ACMPyj+4VL5u#2o0dYr}0C#3{F zPfp6}&Q{f(sfg*WeS)-sa-GRWi3%T`=h2#nm{Ai_jY^Ac^sE*KFWlqswLJ{7x4>`1>D7p3&wfxta;FpRa=S<59i`Z~6FKxn#btfk&>*mgd~G`3 zz^R|p;P{TinVMhk*zjTqb;Fnu0mS$^jo7j>0nIfB6kzI+=|ztUP$Zvqt%E%_4uAeD{9}7x^uKDZHA8VKv#!`PuHk zE!5Tv=9nNTDFQ0IwTf;-MOp3nNVQPue|&x6ZPjD=NbAtO{{U^5P{&8o7ZQiCb1 z8Ao(24(GQxu%{!~)D>*@3xMhx0Qeig%HGk<^_OVuYti^OM4P$UIlBB048`AK92~*+ zj$V%7Kf%ZV~5Z-Q;;l_Z{v#(ni-;f;D+uA>_3_4;u= zI>y9!s)`ru+DgUns)GsnWtapLep49acxd0{2Wk0TWj4*{imJI2Uk69VWvnWFZM?j| ze%qMx8u;{#E>lYWvOov!cI4*YZd;S;@PlY*>`z9zUMyVrD>_u zk;FT^8hqleVs%_zQL(~>Hsp7;rFNAStI;$Q-g4g}F+&Z$Wt|dOA(V#f@d()?^JueI z2U)cFn;&KHmxB*NZdS^iDfzlzBFgn{(~=Rb+ClY_syq@ zw+DCa2k9DL|6G@sbzJI8MOF;0-xKJ?`7i6(4(w`tvBcS|p#=?&>BoG?0hIm$UQ{tb zO$2FgwnIdX(AP~lAbGT67z!sIx^qt}npX#!QxNn-tt?~<2x6Od?}-azkmC>@k(f4A zwcy+(%^;wIk0qB&dBOuG65f_auz@AZ`Y}+I@lXpWVT1wF4do6C+Jqf~BzK)5wNP1B zPwySh7<&=C2p(?rR%|^hkkGAEQqx5JfUgm!$MJ@rogpqsstbr6`<5_qzpKBzqwWlO z6Y2Mn3wYKS?n6iH0A0l_|2%RRu&b+;{eM?+Mx3%jrx;L%8K)~S4NK1(*@JDCX{|kU zzNO?jbBg;{dRI+EXPhq|I;#p+E8K?ToZ#i{Qgb$iMNTqN1L3L>cMP)}>?vltdgr?O zmAW&HF#%i=FJ+B2Nd$sMVzE!qlSES6Dgpj1rF7QGj>&NmaAVOJt>h+pP733Wt+X)@ zk-JY#+)*|CTXwTGHu+J!QQteImc()L@Y_7F6O8yFh@RlYyLSjwXp4;uI(z1>gV8{#P(K>s0e!7z(GEu z7O8qsi2wP_XUyMI=k**{#TS~gH0X8b|47~Mg)<-yEgamCg+?X4mq3NQV?_I=Ku-9t z(~yR;Gd4N@39nxds4$ 
z>*7KFx5K$qHj`g;K*MlVt$nVv_X$)3cDVrn_J73w!QtraU=3BgKjdKhfxa#2eY_UI zMjkuV_tg*uMbsVDhzxIG?#-hzzCKl);>U0*CfM}iyrUz zfzt#u3p-niNlru03{b6<2 ze|47I|I9RG%Z5L+ai3iuct==wY(TroCgcHI{)4@l!$y z?7%j};EwBS$(9Xc3 zugDm^S_!1PFaDxQBC~61!1HXOu(;I9S)P$mOkJIaSv&vb1E%CaEvg~u>TU$PJ2WbJ z;bg%L?s6;%sxqDQ^N$N_CBQ=y$x5i`qf17efjb+MLH62VC2w(G1nJ4g8OP_T-)FCvpK0!;0$R3atfiSKInhJW>?c1L4GigStKEX#py?w z3EQ8S%J?KQ54C9-QZ}9+ru+d`bU9^XsnK#n_f_L8msnT8C)-A=$*KX`RY~RHFzJj} z)vY0|(uh0UvT)_90Lg`9kGL=$g#}wLS8B9no91|Dm9W}4z|bIVOe!$*A}FuZ+bl3iO(Z4 zH+j0GKb>D}!Aid;>`aEhck0z8K-76bK&8WPj!!qY=4JBTVd zxu8?YW@7igM%t@QPHCUU)|Zg5W(oa?;leTPd>eyY@i&fkp+bm1q+4P&R@&<{D3QD$ zp2LjyyNvaGzMCY?yh#YdcW@nChc~P$mkzc_)r(R=?N4rA zit{0De1m-w!e&{~Nm*)X4jAB&yRVN3cJ7S$-dzG*?IZ*-=PHJYPQM$#hpV$*jOE)d zg99xSvCL1=$SEzQE<{Ba<{GG14P$;n|Mmvk$12EyPk#pG#WOI$Tp$;V2DZ_1E+UJ+ zggL3EmM$xTe<{9Xw<^7sgM@Wu!FrdQ`-~zw13h+Rw@SpkvS+!10@j&&&)cSv;Wmge zGHI}P8lF+|H`CctcE?(Bxdyj31l+k)i;uHonPe1eIvOk0=H4B3TB9|znjax$vX!xSMi@b_C_J~>WmEWUV6y%-V`pG{#)xe=lmcPcUy~>u5LKm z`0GU!bvy|jNHMYJO?K>?uAxulI5AzzPhzk5O==WuwYRp76(YPv~iCu zjVLUI>7d=%^ql7^ZDTaTK-*wo@#d}_2}D(TA*w2pnX`h$_OwiUJZF@GU z%*u1sPNHdxS9k?&_RxXK-!O=DmffOOF|?6yP2ba_OMlE)OhSUuHa8yykxPK&LfklT z@g+&R=_hV07~{EzahwIeSK^x7zN6?ur1!olgz6*&kX|y#h{{- zL3klV!X+3NyDFYZJQgnBmQ+-L5&gCFatLUtYxcuv+5O{$Pd&h9UQj)H&Adqr3{Iu- z#s?rt+cpZjs%WHiTYEwVUm2%iNxJ2f27r|B$Hy_pvu#RHq=6EYef9px)-zrx$^$x#(auVSGjhrUzcAC4bWD9}#R zP524BC4d5j_T$=t;;mP8iA8Gq8e`6O4ZaXLP?cnFMQXAHTn6sEY$V~%TdfH7DpeEm<2N@|@f(b$&Cp%iLu$%A}KGw9GD~eJ)_-Zse z!qyc@IRFl|sKg~o|HAmGlckX&G3V<@8&}NKcOi4HWc-~ z>E2nB#knaP-=d~0;wxBt=t2>gPqq3WM;eQT-Po%7OVM(j9Q*G(%+z9@yi~93`bwKc z6CeZoaNn671Bt zUO0D=u1}u_OUc(Nwu~Bj{CMIy$G>JCpCPV=Tg>J!Wt5*2m6VBB`g)0(6IV~dv$)Ha zCgwAbTI(>X7O3Yj)(E)BvZ^NaSu4{b)XfEXRfxl`@RLd-D8l<^lkj>)eNj9N+ex7K zzIflmVHJK{Z%{1x9hppW+S?sz{KRcIv(qdw{GB4!+i0d_fh=gGIxAeU%U$|mECr~t z$vz)@QOxS=^v#007JIn#S(Eyf1&dH=HNe{1rxv7IEgAP(TB-*Rl0KoiE57R zyutJbBhN~jTv_@QYr_Twu9*lmG#iT0gjED=_ z=g=eL{pN_b^qekuROR(uC5yBn^d4R!8=3?qa8A)UZl2bZd9v51CzUMJ7J;27m=EFo zySLpQi3UcHg+{>r@Bqc3mx~8-4#MO?%Yd@(+1rODaBAHsnF2*b zAoUfP(>!B!*@BStadXG7YZP%pLFo*_Y}h^Lf$;J5eefkCinlkUN}p@>k8*nysjH8z zPgn3Sb(Wj*FNt0qcP`%XjWOTP`9t6Bv!K`A|L-0D_Y+T)IA%K~F2e4?Vmv}Mgtj?| zgx!dEq@7+|mxGlMV_|2c&h^xsj=jT6(qWo&mdk*UOGTuV+m3*OGh5u)b9fclmmkGMsN!zmT!^fS*ro`a?QBb>S*Lx&RB>`r508eC3%T1E@oY)t%+l!XH7yC1C&n+W!UnFa8gUpm=3j z4FF5~191O!U=Luz(=b5PBY>6}A#VdXF(2!xvHyUUtiZdQLzW^y)I_tROP^GdG=nr) z|I;^8KmE!zI22QD?8WuX4GLmdHG5klqd>Op#So63E|f+r81e50Wp8gIhk{^u#(;bc z0-+7Ttde|PQy+&0aafWJV^i;gbYhD-`!&%3f1d!n8%A>BZTxxGgL-_|Oh&>RATF-P z&aQ5bzo00up>E2whl8_?xs~m&1|v@-mHLcuU2G0C%l%M$;&;PTO!a+H)5y9}-;i(2QZ6$( zyjpBe)!FHi4r3}lF{=b%j2}WqthNI0*0pYfRS_mb==kASfeM7p&jgXyZZ`(p2KVLT z3sR<8Xx0}WXT4%y#*FA1UD-VL;cwZgP3t0K(c0XY*#Jw3lpFMiG~0^^mAel`f-{$= zek4{N!n8@3-fMq1gDJr*6cH>l+?P(j)jN#?M^f$$zc4Eq|3G%vi%>X_3IwzFq$5l_ z?16Z#_`I@JM#Q<6Jz_zqmI}F+ z=D@|lub&GXyKS9JXrG&V4aQ`kk;Z5#9+C?A&iZi@_@>tv2->Ld-abltl>hNdJJQoY zSM#+6*$O)O;;_U!?YBm=&-J}*)V{QOG3k)Z4-q};HdqS}a8Pqe+EX{oOh7Ud*S$^O zT*&I)>0R{9(zIJ}3DQgBYsVA2hpXRk$ABMoR7=7pH+eqS_{WGLLMgbv&Z&!$KrQnd z97quc78mgMZaK7wpzXrmzG%OC=U2F!-1E!$3*7JC^Dkcd6~Rqz`W1oBZx8Jx@f5-aUc7L7zy5V?15BP(=0sbH9f3qL2Ghg@PE_g3HeuVJv zng6Eqf7Oc%kL;W@z+c4lDib%!=>^SYXLtV>&9CBmHU7U&)P=5Hwu_$C z v2 - v3 = OpenPypeVersion(1, 2, 3, staging=True) - assert str(v3) == "1.2.3+staging" + v3 = OpenPypeVersion(1, 2, 3) + assert str(v3) == "1.2.3" - v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1") - assert str(v4) == "1.2.3-rc.1+staging" + v4 = OpenPypeVersion(1, 2, 3, prerelease="rc.1") + assert str(v4) == "1.2.3-rc.1" assert v3 > v4 assert v1 > v4 assert v4 < 
OpenPypeVersion(1, 2, 3, prerelease="rc.1")
@@ -73,7 +73,7 @@ def test_openpype_version(printer):
         OpenPypeVersion(4, 8, 10),
         OpenPypeVersion(4, 8, 20),
         OpenPypeVersion(4, 8, 9),
-        OpenPypeVersion(1, 2, 3, staging=True),
+        OpenPypeVersion(1, 2, 3),
         OpenPypeVersion(1, 2, 3, build="foo")
     ]
     res = sorted(sort_versions)
@@ -104,27 +104,26 @@ def test_openpype_version(printer):
     with pytest.raises(ValueError):
         _ = OpenPypeVersion(version="booobaa")
 
-    v11 = OpenPypeVersion(version="4.6.7-foo+staging")
+    v11 = OpenPypeVersion(version="4.6.7-foo")
     assert v11.major == 4
     assert v11.minor == 6
     assert v11.patch == 7
-    assert v11.staging is True
     assert v11.prerelease == "foo"
 
 
 def test_get_main_version():
-    ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo")
+    ver = OpenPypeVersion(1, 2, 3, prerelease="foo")
     assert ver.get_main_version() == "1.2.3"
 
 
 def test_get_version_path_from_list():
     versions = [
         OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')),
-        OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")),
+        OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")),
         OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo"))
     ]
     path = BootstrapRepos.get_version_path_from_list(
-        "3.4.5+staging", versions)
+        "3.4.5", versions)
 
     assert path == Path("/bar/baz")
 
@@ -362,12 +361,15 @@ def test_find_openpype(fix_bootstrap, tmp_path_factory, monkeypatch, printer):
     result = fix_bootstrap.find_openpype(include_zips=True)
     # we should have results as file were created
     assert result is not None, "no OpenPype version found"
-    # latest item in `result` should be latest version found.
+    # latest item in `result` should be the latest version found.
+    # this will be `7.2.10-foo+staging` even with *staging* in the name, since
+    # we've dropped the logic to handle staging separately and in alphabetical
+    # sorting it comes after `strange`.
     expected_path = Path(
         d_path / "{}{}{}".format(
-            test_versions_2[3].prefix,
-            test_versions_2[3].version,
-            test_versions_2[3].suffix
+            test_versions_2[4].prefix,
+            test_versions_2[4].version,
+            test_versions_2[4].suffix
         )
     )
     assert result, "nothing found"

From bf58eb8322e3c7092c7dc5b49f636311493dfb63 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 2 Dec 2022 16:04:10 +0100
Subject: [PATCH 163/202] Hound

---
 tests/integration/hosts/nuke/lib.py | 1 +
 tests/lib/testing_classes.py        | 2 --
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py
index baff675da7..70860b92b3 100644
--- a/tests/integration/hosts/nuke/lib.py
+++ b/tests/integration/hosts/nuke/lib.py
@@ -59,5 +59,6 @@ class NukeHostFixtures(HostFixtures):
     def skip_compare_folders(self):
         yield []
 
+
 class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest):
     """Testing class for local publishes."""
diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py
index 5e3b11cfc9..82cc321ae8 100644
--- a/tests/lib/testing_classes.py
+++ b/tests/lib/testing_classes.py
@@ -8,12 +8,10 @@ import tempfile
 import shutil
 import glob
 import platform
-import requests
 import re
 
 from tests.lib.db_handler import DBHandler
 from common.openpype_common.distribution.file_handler import RemoteFileHandler
-from openpype.modules import ModulesManager
 

From 52073873526505251d0087286bdb8775f3c050d9 Mon Sep 17 00:00:00 2001
From: Petr Kalis
Date: Fri, 2 Dec 2022 16:07:04 +0100
Subject: [PATCH 164/202] Added AE test with old stored instances

Release 3.15 will move to New Publisher
---
 .../test_publish_in_aftereffects_legacy.py    | 93 +++++++++++++++++++
 1 file changed, 93 insertions(+)
 create mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py

diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py
new file mode 100644
index 0000000000..8c7a74c60e
--- /dev/null
+++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py
@@ -0,0 +1,93 @@
+import logging
+
+from tests.lib.assert_classes import DBAssert
+from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass
+
+log = logging.getLogger("test_publish_in_aftereffects")
+
+
+class TestPublishInAfterEffects(AELocalPublishTestClass):
+    """Basic test case for publishing in AfterEffects
+
+    Uses old Pyblish schema of created instances.
+
+    Uses generic TestCase to prepare fixtures for test data, testing DBs,
+    env vars.
+
+    Opens AfterEffects, runs publish on prepared workfile.
+
+    Test zip file sets 3 required env vars:
+    - HEADLESS_PUBLISH - this triggers publish immediately after the app is open
+    - IS_TEST - this differentiates between regular webpublish
+    - PYBLISH_TARGETS
+
+    Then checks content of DB (if subset, version, representations were
+    created).
+    Checks tmp folder if all expected files were published.
+ + """ + PERSIST = False + + TEST_FILES = [ + ("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-", + "test_aftereffects_publish_legacy.zip", + "") + ] + + APP_GROUP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTest_taskMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInAfterEffects() From e68ad503e74907bc19a1e7ea71a6a07f675a7e4d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:07:47 +0100 Subject: [PATCH 165/202] Remove temporarily AE tests configured for Tray Publisher --- .../test_publish_in_aftereffects.py | 91 ------------------- ...test_publish_in_aftereffects_multiframe.py | 78 ---------------- 2 files changed, 169 deletions(-) delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py deleted file mode 100644 index 57d5a3e3f1..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ /dev/null @@ -1,91 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AELocalPublishTestClass): - """Basic test case for publishing in AfterEffects - - Uses generic TestCase to prepare fixtures for test data, testing DBs, - env vars. - - Opens AfterEffects, run publish on prepared workile. - - Test zip file sets 3 required env vars: - - HEADLESS_PUBLISH - this triggers publish immediately app is open - - IS_TEST - this differentiate between regular webpublish - - PYBLISH_TARGETS - - Then checks content of DB (if subset, version, representations were - created. - Checks tmp folder if all expected files were published. 
- - """ - PERSIST = False - - TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", - "test_aftereffects_publish.zip", - "") - ] - - APP_GROUP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTest_taskMain")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "aep"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "thumbnail"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "png_png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py deleted file mode 100644 index 2d95eada99..0000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AELocalPublishTestClass): - """Basic test case for publishing in AfterEffects - - Should publish 10 frames - """ - PERSIST = True - - TEST_FILES = [ - ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", - "test_aftereffects_publish_multiframe.zip", - "") - ] - - APP_GROUP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTest_taskMain")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTest_taskMain", - "context.ext": "aep"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", 
- "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "thumbnail"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - additional_args = {"context.subset": "renderTest_taskMain", - "name": "h264_png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() From 6bba712b98d3a966c89d2589a8dfe6884a4d7391 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 16:10:02 +0100 Subject: [PATCH 166/202] nuke: viewer with Rec.709 is correctly returning full name --- openpype/hosts/nuke/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 2691b7447a..bde06e4fd7 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2961,7 +2961,7 @@ def get_viewer_config_from_string(input_string): viewer = split[1] display = split[0] elif "(" in viewer: - pattern = r"([\w\d\s]+).*[(](.*)[)]" + pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" result = re.findall(pattern, viewer) try: result = result.pop() From 9e40ef96a2b320e413a39281beb12f7b2d6d219a Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 15:29:45 +0000 Subject: [PATCH 167/202] Ensure Mongo database directory exists. --- tools/run_mongo.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index c64ff75969..85b94b0971 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -112,4 +112,6 @@ $mongoPath = Find-Mongo $preferred_version Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan +New-Item -ItemType Directory -Force -Path $($dbpath) + Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null From 1fcc15583c1721636244682b3e06c21b4fda729a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:31:56 +0100 Subject: [PATCH 168/202] Fix AE legacy test --- .../hosts/aftereffects/test_publish_in_aftereffects_legacy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py index 8c7a74c60e..5d0c15d63a 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -62,7 +62,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTest_taskMain", + additional_args = {"context.subset": "workfileTest_task", "context.ext": "aep"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, @@ -71,7 +71,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) 
additional_args = {"context.subset": "renderTest_taskMain", From e2ec1457c9014c164ed6ec807416c66ae19a5950 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 2 Dec 2022 16:35:04 +0100 Subject: [PATCH 169/202] Fix Nuke legacy test Legacy Nuke tests are not cleaning up `renders` folders. Branch with DL version disables Cleanup for now. --- tests/integration/hosts/nuke/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index 70860b92b3..96daec7427 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -57,7 +57,7 @@ class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def skip_compare_folders(self): - yield [] + yield ["renders"] class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): From 8b71066d9c33d782ca2520bce251fe733e4d8ad5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 2 Dec 2022 16:53:09 +0100 Subject: [PATCH 170/202] :art: add menu and basic publishing support --- openpype/hosts/max/api/__init__.py | 2 - openpype/hosts/max/api/lib.py | 66 ++++++++++- openpype/hosts/max/api/menu.py | 64 +++++++++-- openpype/hosts/max/api/pipeline.py | 63 +++++----- openpype/hosts/max/api/plugin.py | 108 ++++++++++++++++++ .../max/plugins/create/create_pointcache.py | 21 ++++ openpype/hosts/max/startup/startup.ms | 3 +- 7 files changed, 284 insertions(+), 43 deletions(-) create mode 100644 openpype/hosts/max/api/plugin.py create mode 100644 openpype/hosts/max/plugins/create/create_pointcache.py diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index b6998df862..503afade73 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -4,10 +4,8 @@ from .pipeline import ( MaxHost ) -from .menu import OpenPypeMenu __all__ = [ "MaxHost", - "OpenPypeMenu" ] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py index e50de85f68..8a57bb1bf6 100644 --- a/openpype/hosts/max/api/lib.py +++ b/openpype/hosts/max/api/lib.py @@ -1,2 +1,64 @@ -def imprint(attr, data): - ... +# -*- coding: utf-8 -*- +"""Library of functions useful for 3dsmax pipeline.""" +from pymxs import runtime as rt +from typing import Union + + +def imprint(node_name: str, data: dict) -> bool: + node = rt.getNodeByName(node_name) + if not node: + return False + + for k, v in data.items(): + rt.setUserProp(node, k, v) + + return True + + +def lsattr( + attr: str, + value: Union[str, None] = None, + root: Union[str, None] = None) -> list: + """List nodes having attribute with specified value. + + Args: + attr (str): Attribute name to match. + value (str, Optional): Value to match, of omitted, all nodes + with specified attribute are returned no matter of value. + root (str, Optional): Root node name. If omitted, scene root is used. + + Returns: + list of nodes. 
+    """
+    root = rt.rootnode if root is None else rt.getNodeByName(root)
+
+    def output_node(node, nodes):
+        nodes.append(node)
+        for child in node.Children:
+            output_node(child, nodes)
+
+    nodes = []
+    output_node(root, nodes)
+    if not value:
+        return [n for n in nodes if rt.getUserProp(n, attr)]
+
+    return [n for n in nodes if rt.getUserProp(n, attr) == value]
+
+
+def read(container) -> dict:
+    data = {}
+    props = rt.getUserPropBuffer(container)
+    # this shouldn't happen but let's guard against it anyway
+    if not props:
+        return data
+
+    for line in props.split("\r\n"):
+        key, value = line.split("=")
+        # if the line cannot be split we can't really parse it
+        if not key:
+            continue
+        data[key.strip()] = value.strip()
+
+    data["instance_node"] = container
+
+    return data
diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py
index 13ca503b4d..d1913c51e0 100644
--- a/openpype/hosts/max/api/menu.py
+++ b/openpype/hosts/max/api/menu.py
@@ -1,29 +1,70 @@
 # -*- coding: utf-8 -*-
 """3dsmax menu definition of OpenPype."""
-from abc import ABCMeta, abstractmethod
-import six
 from Qt import QtWidgets, QtCore
 from pymxs import runtime as rt
 from openpype.tools.utils import host_tools
 
 
-@six.add_metaclass(ABCMeta)
 class OpenPypeMenu(object):
+    """Object representing OpenPype menu.
+
+    This uses a "hack" to inject itself before the "Help" menu of 3dsmax.
+    For some reason `postLoadingMenus` event doesn't fire, and main menu
+    is probably re-initialized by menu templates, so we wait for at least
+    one Qt event loop cycle before trying to insert.
+
+    """
     def __init__(self):
+        super().__init__()
         self.main_widget = self.get_main_widget()
+        self.menu = None
+
+        timer = QtCore.QTimer()
+        # set number of event loops to wait.
+        timer.setInterval(1)
+        timer.timeout.connect(self._on_timer)
+        timer.start()
+
+        self._timer = timer
+        self._counter = 0
+
+    def _on_timer(self):
+        if self._counter < 1:
+            self._counter += 1
+            return
+
+        self._counter = 0
+        self._timer.stop()
+        self.build_openpype_menu()
 
     @staticmethod
     def get_main_widget():
         """Get 3dsmax main window."""
         return QtWidgets.QWidget.find(rt.windows.getMAXHWND())
 
-    def get_main_menubar(self):
+    def get_main_menubar(self) -> QtWidgets.QMenuBar:
         """Get main Menubar by 3dsmax main window."""
         return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0]
 
-    def get_or_create_openpype_menu(self, name="&OpenPype", before="&Help"):
+    def get_or_create_openpype_menu(
+            self, name: str = "&OpenPype",
+            before: str = "&Help") -> QtWidgets.QAction:
+        """Create OpenPype menu.
+
+        Args:
+            name (str, Optional): OpenPype menu name.
+            before (str, Optional): Name of the 3dsmax main menu item to
+                add OpenPype menu before.
+
+        Returns:
+            QtWidgets.QAction: OpenPype menu action.
+ + """ + if self.menu is not None: + return self.menu + menu_bar = self.get_main_menubar() menu_items = menu_bar.findChildren( QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) @@ -37,10 +78,13 @@ class OpenPypeMenu(object): help_action = item.menuAction() op_menu = QtWidgets.QMenu("&OpenPype") - menu_bar.insertMenu(before, op_menu) + menu_bar.insertMenu(help_action, op_menu) + + self.menu = op_menu return op_menu - def build_openpype_menu(self): + def build_openpype_menu(self) -> QtWidgets.QAction: + """Build items in OpenPype menu.""" openpype_menu = self.get_or_create_openpype_menu() load_action = QtWidgets.QAction("Load...", openpype_menu) load_action.triggered.connect(self.load_callback) @@ -63,18 +107,24 @@ class OpenPypeMenu(object): workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) workfiles_action.triggered.connect(self.workfiles_callback) openpype_menu.addAction(workfiles_action) + return openpype_menu def load_callback(self): + """Callback to show Loader tool.""" host_tools.show_loader(parent=self.main_widget) def publish_callback(self): + """Callback to show Publisher tool.""" host_tools.show_publisher(parent=self.main_widget) def manage_callback(self): + """Callback to show Scene Manager/Inventory tool.""" host_tools.show_subset_manager(parent=self.main_widget) def library_callback(self): + """Callback to show Library Loader tool.""" host_tools.show_library_loader(parent=self.main_widget) def workfiles_callback(self): + """Callback to show Workfiles tool.""" host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index 2ee5989871..cef45193c4 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -5,6 +5,8 @@ import sys import logging import contextlib +import json + from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api from openpype.pipeline import ( @@ -12,7 +14,7 @@ from openpype.pipeline import ( register_loader_plugin_path, AVALON_CONTAINER_ID, ) -from openpype.hosts.max.api import OpenPypeMenu +from openpype.hosts.max.api.menu import OpenPypeMenu from openpype.hosts.max.api import lib from openpype.hosts.max import MAX_HOST_DIR from openpype.pipeline.load import any_outdated_containers @@ -32,6 +34,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + name = "max" menu = None @@ -46,23 +49,10 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): pyblish.api.register_plugin_path(PUBLISH_PATH) register_loader_plugin_path(LOAD_PATH) register_creator_plugin_path(CREATE_PATH) - log.info("Building menu ...") + # self._register_callbacks() self.menu = OpenPypeMenu() - log.info("Installing callbacks ... 
") - # register_event_callback("init", on_init) - self._register_callbacks() - - # register_event_callback("before.save", before_save) - # register_event_callback("save", on_save) - # register_event_callback("open", on_open) - # register_event_callback("new", on_new) - - # pyblish.api.register_callback( - # "instanceToggled", on_pyblish_instance_toggled - # ) - self._has_been_setup = True def has_unsaved_changes(self): @@ -70,7 +60,7 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): return True def get_workfile_extensions(self): - return [".hip", ".hiplc", ".hipnc"] + return [".max"] def save_workfile(self, dst_path=None): rt.saveMaxFile(dst_path) @@ -88,17 +78,15 @@ class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): return ls() def _register_callbacks(self): - for event in self._op_events.copy().values(): - if event is None: - continue + rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks")) - try: - rt.callbacks.removeScript(id=rt.name(event.name)) - except RuntimeError as e: - log.info(e) + rt.callbacks.addScript( + rt.Name("postLoadingMenus"), + self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks')) - rt.callbacks.addScript( - event.name, event.callback, id=rt.Name('OpenPype')) + def _deferred_menu_creation(self): + self.log.info("Building menu ...") + self.menu = OpenPypeMenu() @staticmethod def create_context_node(): @@ -128,12 +116,12 @@ attributes "OpenPypeContext" def update_context_data(self, data, changes): try: - context = rt.rootScene.OpenPypeContext.context + _ = rt.rootScene.OpenPypeContext.context except AttributeError: # context node doesn't exists - context = self.create_context_node() + self.create_context_node() - lib.imprint(context, data) + rt.rootScene.OpenPypeContext.context = json.dumps(data) def get_context_data(self): try: @@ -141,7 +129,9 @@ attributes "OpenPypeContext" except AttributeError: # context node doesn't exists context = self.create_context_node() - return lib.read(context) + if not context: + context = "{}" + return json.loads(context) def save_file(self, dst_path=None): # Force forwards slashes to avoid segfault @@ -149,5 +139,16 @@ attributes "OpenPypeContext" rt.saveMaxFile(dst_path) -def ls(): - ... 
\ No newline at end of file
+def ls() -> list:
+    """Get all OpenPype instances."""
+    objs = rt.objects
+    containers = [
+        obj for obj in objs
+        if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID
+    ]
+
+    for container in sorted(containers, key=lambda node: node.name):
+        yield lib.read(container)
+
+
+
diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py
new file mode 100644
index 0000000000..0f01c94ce1
--- /dev/null
+++ b/openpype/hosts/max/api/plugin.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+"""3dsmax specific Avalon/Pyblish plugin definitions."""
+import sys
+from pymxs import runtime as rt
+import six
+from abc import ABCMeta
+from openpype.pipeline import (
+    CreatorError,
+    Creator,
+    CreatedInstance
+)
+from openpype.lib import BoolDef
+from .lib import imprint, read, lsattr
+
+
+class OpenPypeCreatorError(CreatorError):
+    pass
+
+
+class MaxCreatorBase(object):
+
+    @staticmethod
+    def cache_subsets(shared_data):
+        if shared_data.get("max_cached_subsets") is None:
+            shared_data["max_cached_subsets"] = {}
+            cached_instances = lsattr("id", "pyblish.avalon.instance")
+            for i in cached_instances:
+                creator_id = i.get("creator_identifier")
+                if creator_id not in shared_data["max_cached_subsets"]:
+                    shared_data["max_cached_subsets"][creator_id] = [i]
+                else:
+                    shared_data[
+                        "max_cached_subsets"][creator_id].append(i)  # noqa
+        return shared_data
+
+    @staticmethod
+    def create_instance_node(node_name: str, parent: str = ""):
+        parent_node = rt.getNodeByName(parent) if parent else rt.rootScene
+        if not parent_node:
+            raise OpenPypeCreatorError(f"Specified parent {parent} not found")
+
+        container = rt.container(name=node_name)
+        container.Parent = parent_node
+
+        return container
+
+
+@six.add_metaclass(ABCMeta)
+class MaxCreator(Creator, MaxCreatorBase):
+    selected_nodes = []
+
+    def create(self, subset_name, instance_data, pre_create_data):
+        if pre_create_data.get("use_selection"):
+            self.selected_nodes = rt.getCurrentSelection()
+
+        instance_node = self.create_instance_node(subset_name)
+        instance_data["instance_node"] = instance_node.name
+        instance = CreatedInstance(
+            self.family,
+            subset_name,
+            instance_data,
+            self
+        )
+        self._add_instance_to_context(instance)
+        imprint(instance_node.name, instance.data_to_store())
+        return instance
+
+    def collect_instances(self):
+        self.cache_subsets(self.collection_shared_data)
+        for instance in self.collection_shared_data[
+                "max_cached_subsets"].get(self.identifier, []):
+            created_instance = CreatedInstance.from_existing(
+                read(instance), self
+            )
+            self._add_instance_to_context(created_instance)
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+            instance_node = created_inst.get("instance_node")
+
+            new_values = {
+                key: new_value
+                for key, (_old_value, new_value) in _changes.items()
+            }
+            imprint(
+                instance_node,
+                new_values,
+            )
+
+    def remove_instances(self, instances):
+        """Remove specified instance from the scene.
+
+        This is only removing `id` parameter so instance is no longer
+        instance, because it might contain valuable data for artist.
+ + """ + for instance in instances: + instance_node = rt.getNodeByName( + instance.data.get("instance_node")) + if instance_node: + rt.delete(instance_node) + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py new file mode 100644 index 0000000000..4c9ec7fb97 --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreatePointCache(plugin.MaxCreator): + identifier = "io.openpype.creators.max.pointcache" + label = "Point Cache" + family = "pointcache" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + from pymxs import runtime as rt + + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/startup/startup.ms b/openpype/hosts/max/startup/startup.ms index 94318afb01..aee40eb6bc 100644 --- a/openpype/hosts/max/startup/startup.ms +++ b/openpype/hosts/max/startup/startup.ms @@ -2,7 +2,8 @@ ( local sysPath = dotNetClass "System.IO.Path" local sysDir = dotNetClass "System.IO.Directory" - local startup = sysPath.Combine (sysPath.GetDirectoryName getSourceFile) "startup.py" + local localScript = getThisScriptFilename() + local startup = sysPath.Combine (sysPath.GetDirectoryName localScript) "startup.py" python.ExecuteFile startup ) \ No newline at end of file From 1719e33b00807c336fdf6367460b9fb386a91930 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 2 Dec 2022 17:20:32 +0100 Subject: [PATCH 171/202] flame: create vertically aligned subsets fix --- openpype/hosts/flame/api/plugin.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 26129ebaa6..7e012330cf 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -596,18 +596,19 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): - hero_data.update({"heroTrack": False}) - if _in == self.clip_in and _out == self.clip_out: + _hero_data = deepcopy(hero_data) + _hero_data.update({"heroTrack": False}) + if _in <= self.clip_in and _out >= self.clip_out: data_subset = hero_data["subset"] # add track index in case duplicity of names in hero data if self.subset in data_subset: - hero_data["subset"] = self.subset + str( + _hero_data["subset"] = self.subset + str( self.track_index) # in case track name and subset name is the same then add if self.subset_name == self.track_name: - hero_data["subset"] = self.subset + _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag - tag_hierarchy_data = hero_data + tag_hierarchy_data = _hero_data # add data to return data dict self.marker_data.update(tag_hierarchy_data) From b15b5832241fda937dfda287220a70e66f8bcb7e Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 3 Dec 2022 03:28:38 +0000 Subject: [PATCH 172/202] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index fc687a1263..5e61ee3a6b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.8" +__version__ = "3.14.9-nightly.1" From ee921e0bd4f384a3a94707d706f251e4aa997927 Mon Sep 17 00:00:00 2001 From: Derek Severin Date: Sat, 3 Dec 2022 17:04:02 +0700 Subject: [PATCH 173/202] Removed class variable and TODOs --- .../plugins/publish/validate_texture_workfiles.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index a25b80438d..a7ae02a2eb 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -20,10 +20,6 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True - #TODO(2-rec): remove/change comment - # from presets - main_workfile_extensions = ['mra'] - def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] @@ -43,17 +39,19 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): formatting_data=formatting_data ) - @classmethod - def get_main_workfile_extensions(cls): + @staticmethod + def get_main_workfile_extensions(): project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) - #TODO: find better way? (depends on other plugin) try: extensions = (project_settings["standalonepublisher"] ["publish"] ["CollectTextures"] ["main_workfile_extensions"]) except KeyError: - extensions = cls.main_workfile_extensions + raise Exception("Setting 'Main workfile extensions' not found." 
+ " The setting must be set for the" + " 'Collect Texture' publish plugin of the" + " 'Standalone Publish' tool.") return extensions From c071724bcf16e25e2139bad404227de3ef2c83f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 10:19:28 +0100 Subject: [PATCH 174/202] remove tk_library and tcl_librabry envs for nuke processes --- openpype/hosts/hiero/addon.py | 5 +++++ openpype/hosts/nuke/addon.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py index f5bb94dbaa..1cc7a8637e 100644 --- a/openpype/hosts/hiero/addon.py +++ b/openpype/hosts/hiero/addon.py @@ -27,7 +27,12 @@ class HieroAddon(OpenPypeModule, IHostAddon): new_hiero_paths.append(norm_path) env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + # Remove auto screen scale factor for Qt + # - let Hiero decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py index 1c5d5c4005..9d25afe2b6 100644 --- a/openpype/hosts/nuke/addon.py +++ b/openpype/hosts/nuke/addon.py @@ -27,7 +27,12 @@ class NukeAddon(OpenPypeModule, IHostAddon): new_nuke_paths.append(norm_path) env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + # Remove auto screen scale factor for Qt + # - let Nuke decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] From c0b05e5846eecf7788d7ec3866023c83e4dded70 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 5 Dec 2022 10:50:58 +0100 Subject: [PATCH 175/202] add break and better explanation of procedure --- openpype/hosts/flame/api/plugin.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 7e012330cf..0d45792a38 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -596,6 +596,14 @@ class PublishableClip: if not hero_track and self.vertical_sync: # driving layer is set as negative match for (_in, _out), hero_data in self.vertical_clip_match.items(): + """ + Since only one instance of hero clip is expected in + `self.vertical_clip_match`, this will loop only once + until none hero clip will be matched with hero clip. + + `tag_hierarchy_data` will be used only once for every + clip which is not hero clip. 
+ """ _hero_data = deepcopy(hero_data) _hero_data.update({"heroTrack": False}) if _in <= self.clip_in and _out >= self.clip_out: @@ -609,6 +617,7 @@ class PublishableClip: _hero_data["subset"] = self.subset # assing data to return hierarchy data to tag tag_hierarchy_data = _hero_data + break # add data to return data dict self.marker_data.update(tag_hierarchy_data) From ca1d518dd0dd026124b8879fdb446b34a170cc05 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 5 Dec 2022 10:53:03 +0100 Subject: [PATCH 176/202] comment improvement --- openpype/hosts/flame/api/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py index 0d45792a38..ca113fd98a 100644 --- a/openpype/hosts/flame/api/plugin.py +++ b/openpype/hosts/flame/api/plugin.py @@ -601,7 +601,7 @@ class PublishableClip: `self.vertical_clip_match`, this will loop only once until none hero clip will be matched with hero clip. - `tag_hierarchy_data` will be used only once for every + `tag_hierarchy_data` will be set only once for every clip which is not hero clip. """ _hero_data = deepcopy(hero_data) From 0151540fdd91f92c51b78a21a529c2ae913b69a9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:24:56 +0100 Subject: [PATCH 177/202] added collector which can add comment per instance --- openpype/plugins/publish/collect_comment.py | 22 +++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 062142ace9..a2aef7fc1c 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -6,6 +6,28 @@ Provides: """ import pyblish.api +from openpype.lib.attribute_definitions import TextDef +from openpype.pipeline.publish import OpenPypePyblishPluginMixin + + +class CollectInstanceCommentDef( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): + label = "Comment per instance" + targets = ["local"] + # Disable plugin by default + families = ["*"] + enabled = True + + def process(self, instance): + pass + + @classmethod + def get_attribute_defs(cls): + return [ + TextDef("comment", label="Comment") + ] class CollectComment(pyblish.api.ContextPlugin): From f7f5019401adf912cfba48d9d939af7492c9e3a6 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:25:14 +0100 Subject: [PATCH 178/202] CollectComment plugin also store comment on each instance --- openpype/plugins/publish/collect_comment.py | 86 ++++++++++++++++++--- 1 file changed, 76 insertions(+), 10 deletions(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index a2aef7fc1c..a1b4e1364a 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -1,8 +1,26 @@ -""" -Requires: - None -Provides: - context -> comment (str) +"""Collect comment and add option to enter comment per instance. + +Combination of plugins. One define optional input for instances in Publisher +UI (CollectInstanceCommentDef) and second cares that each instance during +collection has available "comment" key in data (CollectComment). + +Plugin 'CollectInstanceCommentDef' define "comment" attribute which won't be +filled with any value if instance does not match families filter or when +plugin is disabled. 
+ +Plugin 'CollectComment' makes sure that each instance in context has +available "comment" key in data which can be set to 'str' or 'None' if it is +not set. +- In case the instance already has a filled comment, the plugin's logic is skipped +- The comment is always set and the value should always be 'str' even if it is empty + +Why they are separated: +- 'CollectInstanceCommentDef' can have specific settings to show comment + attribute only to defined families in publisher UI +- 'CollectComment' will run all the time + +Todos: + The comment per instance is not sent via farm. """ import pyblish.api @@ -31,11 +49,59 @@ class CollectInstanceCommentDef( class CollectComment(pyblish.api.ContextPlugin): - """This plug-ins displays the comment dialog box per default""" + """Collect comment per each instance. - label = "Collect Comment" - order = pyblish.api.CollectorOrder + Plugin makes sure each instance to publish has "comment" set in data so any + further plugin can use it directly. + """ + + label = "Collect Instance Comment" + order = pyblish.api.CollectorOrder + 0.49 def process(self, context): - comment = (context.data.get("comment") or "").strip() - context.data["comment"] = comment + context_comment = self.cleanup_comment(context.data.get("comment")) + # Set it back + context.data["comment"] = context_comment + for instance in context: + instance_label = str(instance) + # Check if comment is already set + instance_comment = self.cleanup_comment( + instance.data.get("comment")) + + # If comment on instance is not set then look for attributes + if not instance_comment: + attr_values = self.get_attr_values_from_data_for_plugin( + CollectInstanceCommentDef, instance.data + ) + instance_comment = self.cleanup_comment( + attr_values.get("comment") + ) + + # Use context comment if all comment options on the instance are + # empty + if not instance_comment: + instance_comment = context_comment + + instance.data["comment"] = instance_comment + if instance_comment: + msg_end = " has comment set to: \"{}\"".format( + instance_comment) + else: + msg_end = " does not have set comment" + self.log.debug("Instance {} {}".format(instance_label, msg_end)) + + def cleanup_comment(self, comment): + """Cleanup comment value. + + Args: + comment (Union[str, None]): Comment value from data. + + Returns: + str: Cleaned comment which is stripped or empty string if input + was 'None'. 
+ """ + + if comment: + return comment.strip() + return "" + From fd5ac3be1bc975d9e3329c835d583d269ec7c575 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:29:01 +0100 Subject: [PATCH 179/202] added settings for the attribute collector --- openpype/plugins/publish/collect_comment.py | 18 ++++++++++++++-- .../defaults/project_settings/global.json | 4 ++++ .../schemas/schema_global_publish.json | 21 +++++++++++++++++++ 3 files changed, 41 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index a1b4e1364a..db5a04681b 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -35,12 +35,26 @@ class CollectInstanceCommentDef( label = "Comment per instance" targets = ["local"] # Disable plugin by default - families = ["*"] - enabled = True + families = [] + enabled = False def process(self, instance): pass + @classmethod + def apply_settings(cls, project_setting, _): + plugin_settings = project_setting["global"]["publish"].get( + "collect_comment_per_instance" + ) + if not plugin_settings: + return + + if plugin_settings.get("enabled") is not None: + cls.enabled = plugin_settings["enabled"] + + if plugin_settings.get("families") is not None: + cls.families = plugin_settings["families"] + @classmethod def get_attribute_defs(cls): return [ diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 46b8b1b0c8..89d7cf08b7 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -24,6 +24,10 @@ ], "skip_hosts_headless_publish": [] }, + "collect_comment_per_instance": { + "enabled": false, + "families": [] + }, "ValidateEditorialAssetName": { "enabled": true, "optional": false diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 742437fbde..f2ada5fd8d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -60,6 +60,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "collect_comment_per_instance", + "label": "Collect comment per instance", + "checkbox_key": "enabled", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, From 1f05a3952262a342a72e8308643c6d1a7a0ffdba Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:31:09 +0100 Subject: [PATCH 180/202] use comment from instance where possible --- openpype/hosts/nuke/plugins/publish/extract_slate_frame.py | 2 +- .../deadline/plugins/publish/submit_celaction_deadline.py | 2 +- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 1 + .../ftrack/plugins/publish/integrate_ftrack_description.py | 2 +- .../modules/ftrack/plugins/publish/integrate_ftrack_note.py | 2 +- openpype/plugins/publish/extract_burnin.py | 2 +- openpype/plugins/publish/integrate.py | 2 +- openpype/plugins/publish/integrate_legacy.py | 2 +- 8 files changed, 8 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py 
b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index e7197b4fa8..06c086b10d 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -298,7 +298,7 @@ class ExtractSlateFrame(publish.Extractor): def add_comment_slate_node(self, instance, node): - comment = instance.context.data.get("comment") + comment = instance.data["comment"] intent = instance.context.data.get("intent") if not isinstance(intent, dict): intent = { diff --git a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea44a24459..038ee4fc03 100644 --- a/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -38,7 +38,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin): assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = context.data.get("comment", "") + self._comment = instance.data["comment"] self._deadline_user = context.data.get( "deadlineUser", getpass.getuser()) self._frame_start = int(instance.data["frameStart"]) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 249211e965..45688e8584 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -777,6 +777,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "handleEnd": handle_end, "frameStartHandle": start - handle_start, "frameEndHandle": end + handle_end, + "comment": instance.data["comment"], "fps": fps, "source": source, "extendFrames": data.get("extendFrames"), diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index e7c265988e..6ed02bc8b6 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -38,7 +38,7 @@ class IntegrateFtrackDescription(pyblish.api.InstancePlugin): self.log.info("There are any integrated AssetVersions") return - comment = (instance.context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index ac3fa874e0..6776509dda 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,7 +45,7 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - comment = (context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 4179199317..fd8dfdece9 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -468,7 +468,7 @@ class ExtractBurnin(publish.Extractor): burnin_data.update({ "version": int(version), - "comment": 
context.data.get("comment") or "" + "comment": instance.data["comment"] }) intent_label = context.data.get("intent") or "" diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 7e4fc84658..57a642c635 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -772,7 +772,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": instance.data.get("fps", context.data.get("fps")) } diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c..670b637faa 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -968,7 +968,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": context.data.get( "fps", instance.data.get("fps") From 5d24bfcf6318fa4fec1267612c933989fa2beb22 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:31:31 +0100 Subject: [PATCH 181/202] commit forgotten change of getting attribute values from plugin --- openpype/pipeline/publish/publish_plugins.py | 26 ++++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 6e2be1ce2c..47dfaf6b98 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +import inspect from abc import ABCMeta import pyblish.api @@ -132,6 +133,25 @@ class OpenPypePyblishPluginMixin: ) return attribute_values + @staticmethod + def get_attr_values_from_data_for_plugin(plugin, data): + """Get attribute values for attribute definitions from data. + + Args: + plugin (Union[publish.api.Plugin, Type[publish.api.Plugin]]): The + plugin for which attributes are extracted. + data(dict): Data from instance or context. + """ + + if not inspect.isclass(plugin): + plugin = plugin.__class__ + + return ( + data + .get("publish_attributes", {}) + .get(plugin.__name__, {}) + ) + def get_attr_values_from_data(self, data): """Get attribute values for attribute definitions from data. @@ -139,11 +159,7 @@ class OpenPypePyblishPluginMixin: data(dict): Data from instance or context. 
""" - return ( - data - .get("publish_attributes", {}) - .get(self.__class__.__name__, {}) - ) + return self.get_attr_values_from_data_for_plugin(self.__class__, data) class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): From e6585be6772006ca748c8fbf5697f981bcd0de12 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 5 Dec 2022 19:35:03 +0100 Subject: [PATCH 182/202] fix missing method --- openpype/plugins/publish/collect_comment.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index db5a04681b..83609a04bd 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -62,7 +62,10 @@ class CollectInstanceCommentDef( ] -class CollectComment(pyblish.api.ContextPlugin): +class CollectComment( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): """Collect comment per each instance. Plugin makes sure each instance to publish has set "comment" in data so any From 6e520f564bfe58aa23c1430d175d30dccd95eb40 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 09:52:36 +0100 Subject: [PATCH 183/202] removed redundant line --- openpype/plugins/publish/collect_comment.py | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 83609a04bd..12579cd957 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -121,4 +121,3 @@ class CollectComment( if comment: return comment.strip() return "" - From fffe1162b0ab68dbd3ea5e0e234c4b2b6cf51ac8 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 10:00:16 +0100 Subject: [PATCH 184/202] fix import --- openpype/plugins/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index db567f8b8f..37a5e90f86 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -7,7 +7,7 @@ from openpype.client import ( get_last_versions, get_representations, ) -from openpype.pipeline import get_representation_path_with_anatomy +from openpype.pipeline.load import get_representation_path_with_anatomy class CollectAudio(pyblish.api.ContextPlugin): From ad1380541ff48ebc495baacfcfd65c83952f81a7 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 10:17:03 +0100 Subject: [PATCH 185/202] fix fields query --- openpype/plugins/publish/collect_audio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py index 37a5e90f86..3a0ddb3281 100644 --- a/openpype/plugins/publish/collect_audio.py +++ b/openpype/plugins/publish/collect_audio.py @@ -121,7 +121,7 @@ class CollectAudio(pyblish.api.ContextPlugin): asset_docs = get_assets( project_name, asset_names=asset_names, - fields=["_id"] + fields=["_id", "name"] ) asset_id_by_name = {} From 63b47efc51f92c8082ad76a3154a45703e80423c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:02:03 +0100 Subject: [PATCH 186/202] integrate thumbnail looks for thumbnail to multiple places --- .../plugins/publish/integrate_thumbnail.py | 58 +++++++++++++++++-- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index 
f74c3d9609..cd472a7e6b 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -102,8 +102,56 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): thumbnail_root ) + def _get_thumbnail_from_instance(self, instance): + # 1. Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path + + # 2. Look for thumbnail in published representations + published_repres = instance.data.get("published_representations") + path = self._get_thumbnail_path_from_published(published_repres) + if path and os.path.exists(path): + return path + + if path: + self.log.warning( + "Could not find published thumbnail path {}".format(path) + ) + + # 3. Look for thumbnail in "not published" representations + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _prepare_instances(self, context): - context_thumbnail_path = context.get("thumbnailPath") + context_thumbnail_path = context.data.get("thumbnailPath") valid_context_thumbnail = False if context_thumbnail_path and os.path.exists(context_thumbnail_path): valid_context_thumbnail = True @@ -122,8 +170,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): continue # Find thumbnail path on instance - thumbnail_path = self._get_instance_thumbnail_path( - published_repres) + thumbnail_path = self._get_thumbnail_from_instance(instance) if thumbnail_path: self.log.debug(( "Found thumbnail path for instance \"{}\"." 
@@ -157,7 +204,10 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): for repre_info in published_representations.values(): return repre_info["representation"]["parent"] - def _get_instance_thumbnail_path(self, published_representations): + def _get_thumbnail_path_from_published(self, published_representations): + if not published_representations: + return None + thumb_repre_doc = None for repre_info in published_representations.values(): repre_doc = repre_info["representation"] From 21411d50624385122d200b0d0317a54b26d83e50 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:14:32 +0100 Subject: [PATCH 187/202] store thumbnail path to instance data --- openpype/plugins/publish/extract_thumbnail_from_source.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 8da1213807..03df1455e2 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -73,6 +73,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin): "Adding thumbnail representation: {}".format(new_repre) ) instance.data["representations"].append(new_repre) + instance.data["thumbnailPath"] = dst_filepath def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: From aa704b40eaa42bd3e4184dd6b754cfcf8f3069f1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:14:52 +0100 Subject: [PATCH 188/202] change order of thumbnail path resolving --- .../plugins/publish/integrate_thumbnail.py | 40 ++++++++----------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index cd472a7e6b..f1455dc66b 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -103,12 +103,7 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ) def _get_thumbnail_from_instance(self, instance): - # 1. Look for thumbnail path on instance in 'thumbnailPath' - thumbnail_path = instance.data.get("thumbnailPath") - if thumbnail_path and os.path.exists(thumbnail_path): - return thumbnail_path - - # 2. Look for thumbnail in published representations + # 1. Look for thumbnail in published representations published_repres = instance.data.get("published_representations") path = self._get_thumbnail_path_from_published(published_repres) if path and os.path.exists(path): @@ -119,34 +114,33 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): "Could not find published thumbnail path {}".format(path) ) - # 3. Look for thumbnail in "not published" representations + # 2. 
Look for thumbnail in "not published" representations repres = instance.data.get("representations") - if not repres: - return None - thumbnail_repre = next( ( repre - for repre in repres + for repre in repres or [] if repre["name"] == "thumbnail" ), None ) - if not thumbnail_repre: - return None + if thumbnail_repre: + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") - staging_dir = thumbnail_repre.get("stagingDir") - if not staging_dir: - staging_dir = instance.data.get("stagingDir") + filename = thumbnail_repre.get("files") + if isinstance(filename, (list, tuple, set)): + filename = filename[0] - filename = thumbnail_repre.get("files") - if not staging_dir or not filename: - return None + if staging_dir and filename: + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path - if isinstance(filename, (list, tuple, set)): - filename = filename[0] - thumbnail_path = os.path.join(staging_dir, filename) - if os.path.exists(thumbnail_path): + # 3. Look for thumbnail path on instance in 'thumbnailPath' + thumbnail_path = instance.data.get("thumbnailPath") + if thumbnail_path and os.path.exists(thumbnail_path): return thumbnail_path return None From 608afc35465bf17d541cc58e8922e36580949787 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 15:19:04 +0100 Subject: [PATCH 189/202] move unpublished representations logic to separated method --- .../plugins/publish/integrate_thumbnail.py | 57 ++++++++++++------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f1455dc66b..809a1782e0 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -115,28 +115,9 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): ) # 2. Look for thumbnail in "not published" representations - repres = instance.data.get("representations") - thumbnail_repre = next( - ( - repre - for repre in repres or [] - if repre["name"] == "thumbnail" - ), - None - ) - if thumbnail_repre: - staging_dir = thumbnail_repre.get("stagingDir") - if not staging_dir: - staging_dir = instance.data.get("stagingDir") - - filename = thumbnail_repre.get("files") - if isinstance(filename, (list, tuple, set)): - filename = filename[0] - - if staging_dir and filename: - thumbnail_path = os.path.join(staging_dir, filename) - if os.path.exists(thumbnail_path): - return thumbnail_path + thumbnail_path = self._get_thumbnail_path_from_unpublished(instance) + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path # 3. 
Look for thumbnail path on instance in 'thumbnailPath' thumbnail_path = instance.data.get("thumbnailPath") @@ -223,6 +204,38 @@ class IntegrateThumbnails(pyblish.api.ContextPlugin): return None return os.path.normpath(path) + def _get_thumbnail_path_from_unpublished(self, instance): + repres = instance.data.get("representations") + if not repres: + return None + + thumbnail_repre = next( + ( + repre + for repre in repres + if repre["name"] == "thumbnail" + ), + None + ) + if not thumbnail_repre: + return None + + staging_dir = thumbnail_repre.get("stagingDir") + if not staging_dir: + staging_dir = instance.data.get("stagingDir") + + filename = thumbnail_repre.get("files") + if not staging_dir or not filename: + return None + + if isinstance(filename, (list, tuple, set)): + filename = filename[0] + + thumbnail_path = os.path.join(staging_dir, filename) + if os.path.exists(thumbnail_path): + return thumbnail_path + return None + def _integrate_thumbnails( self, filtered_instance_items, From c764dc20c641bb6ef58df1c4b29f7490b6417276 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:00:49 +0100 Subject: [PATCH 190/202] normalize paths when added to queue --- openpype/lib/file_transaction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 1626bec6b6..ce7ef100c1 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -66,8 +66,8 @@ class FileTransaction(object): """Add a new file to transfer queue""" opts = {"mode": mode} - src = os.path.abspath(src) - dst = os.path.abspath(dst) + src = os.path.normpath(os.path.abspath(src)) + dst = os.path.normpath(os.path.abspath(dst)) if dst in self._transfers: queued_src = self._transfers[dst][0] From 18a9c5568426f6b67dc23d90742c6ac140e38800 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:02:21 +0100 Subject: [PATCH 191/202] skip if source and destination are the same paths --- openpype/lib/file_transaction.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index ce7ef100c1..4aedc62fb6 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -84,9 +84,11 @@ class FileTransaction(object): self._transfers[dst] = (src, opts) def process(self): - # Backup any existing files - for dst in self._transfers.keys(): + for dst, (src, opts) in self._transfers.items(): + if not os.path.isdir(src) and dst == src: + continue + if os.path.exists(dst): # Backup original file # todo: add timestamp or uuid to ensure unique @@ -98,6 +100,12 @@ class FileTransaction(object): # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): + if not os.path.isdir(src) and dst == src: + self.log.debug( + "Source and destionation are same files {} -> {}".format( + src, dst)) + continue + self._create_folder_for_file(dst) if opts["mode"] == self.MODE_COPY: From 36dcab11c1c54cec6040456de8ec74ee20635111 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:02:52 +0100 Subject: [PATCH 192/202] formatting changes --- openpype/lib/file_transaction.py | 60 ++++++++++++++++++-------------- 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 4aedc62fb6..2d706adaef 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -14,9 +14,9 @@ else: class 
FileTransaction(object): - """ + """File transaction with rollback options. - The file transaction is a three step process. + The file transaction is a three-step process. 1) Rename any existing files to a "temporary backup" during `process()` 2) Copy the files to final destination during `process()` @@ -39,14 +39,12 @@ class FileTransaction(object): Warning: Any folders created during the transfer will not be removed. - """ MODE_COPY = 0 MODE_HARDLINK = 1 def __init__(self, log=None): - if log is None: log = logging.getLogger("FileTransaction") @@ -63,7 +61,14 @@ class FileTransaction(object): self._backup_to_original = {} def add(self, src, dst, mode=MODE_COPY): - """Add a new file to transfer queue""" + """Add a new file to transfer queue. + + Args: + src (str): Source path. + dst (str): Destination path. + mode (MODE_COPY, MODE_HARDLINK): Transfer mode. + """ + opts = {"mode": mode} src = os.path.normpath(os.path.abspath(src)) @@ -72,14 +77,15 @@ class FileTransaction(object): if dst in self._transfers: queued_src = self._transfers[dst][0] if src == queued_src: - self.log.debug("File transfer was already " - "in queue: {} -> {}".format(src, dst)) + self.log.debug( + "File transfer was already in queue: {} -> {}".format( + src, dst)) return else: self.log.warning("File transfer in queue replaced..") - self.log.debug("Removed from queue: " - "{} -> {}".format(queued_src, dst)) - self.log.debug("Added to queue: {} -> {}".format(src, dst)) + self.log.debug( + "Removed from queue: {} -> {} replaced by {} -> {}".format( + queued_src, dst, src, dst)) self._transfers[dst] = (src, opts) @@ -94,8 +100,8 @@ class FileTransaction(object): # todo: add timestamp or uuid to ensure unique backup = dst + ".bak" self._backup_to_original[backup] = dst - self.log.debug("Backup existing file: " - "{} -> {}".format(dst, backup)) + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) os.rename(dst, backup) # Copy the files to transfer @@ -112,8 +118,8 @@ class FileTransaction(object): self.log.debug("Copying file ... {} -> {}".format(src, dst)) copyfile(src, dst) elif opts["mode"] == self.MODE_HARDLINK: - self.log.debug("Hardlinking file ... {} -> {}".format(src, - dst)) + self.log.debug("Hardlinking file ... 
{} -> {}".format( + src, dst)) create_hard_link(src, dst) self._transferred.append(dst) @@ -124,23 +130,21 @@ class FileTransaction(object): try: os.remove(backup) except OSError: - self.log.error("Failed to remove backup file: " - "{}".format(backup), - exc_info=True) + self.log.error( + "Failed to remove backup file: {}".format(backup), + exc_info=True) def rollback(self): - errors = 0 - # Rollback any transferred files for path in self._transferred: try: os.remove(path) except OSError: errors += 1 - self.log.error("Failed to rollback created file: " - "{}".format(path), - exc_info=True) + self.log.error( + "Failed to rollback created file: {}".format(path), + exc_info=True) # Rollback the backups for backup, original in self._backup_to_original.items(): @@ -148,13 +152,15 @@ class FileTransaction(object): os.rename(backup, original) except OSError: errors += 1 - self.log.error("Failed to restore original file: " - "{} -> {}".format(backup, original), - exc_info=True) + self.log.error( + "Failed to restore original file: {} -> {}".format( + backup, original), + exc_info=True) if errors: - self.log.error("{} errors occurred during " - "rollback.".format(errors), exc_info=True) + self.log.error( + "{} errors occurred during rollback.".format(errors), + exc_info=True) six.reraise(*sys.exc_info()) @property From ee71a051b6066011fc4cfe8cd261de8fe9081fad Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:05:14 +0100 Subject: [PATCH 193/202] removed redundant check of directory --- openpype/lib/file_transaction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 2d706adaef..6f285d73a8 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -92,7 +92,7 @@ class FileTransaction(object): def process(self): # Backup any existing files for dst, (src, opts) in self._transfers.items(): - if not os.path.isdir(src) and dst == src: + if dst == src: continue if os.path.exists(dst): @@ -106,7 +106,7 @@ class FileTransaction(object): # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): - if not os.path.isdir(src) and dst == src: + if dst == src: self.log.debug( "Source and destionation are same files {} -> {}".format( src, dst)) From 9f2cd89e1521bca7af39927d09655867a082456f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:06:52 +0100 Subject: [PATCH 194/202] remove unused variable --- openpype/lib/file_transaction.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 6f285d73a8..f265b8815c 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -91,18 +91,17 @@ class FileTransaction(object): def process(self): # Backup any existing files - for dst, (src, opts) in self._transfers.items(): - if dst == src: + for dst, (src, _) in self._transfers.items(): + if dst == src or not os.path.exists(dst): continue - if os.path.exists(dst): - # Backup original file - # todo: add timestamp or uuid to ensure unique - backup = dst + ".bak" - self._backup_to_original[backup] = dst - self.log.debug( - "Backup existing file: {} -> {}".format(dst, backup)) - os.rename(dst, backup) + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) + 
os.rename(dst, backup) # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): From 2c55ee55c266dbfe90394918e604ed87c51d619e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 6 Dec 2022 16:09:54 +0100 Subject: [PATCH 195/202] remove source and destination check from integrate --- openpype/plugins/publish/integrate.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 57a642c635..6a85a87129 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -291,9 +291,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin): instance) for src, dst in prepared["transfers"]: - if src == dst: - continue - # todo: add support for hardlink transfers file_transactions.add(src, dst) From 1c985ca0015ce4e3161e18a91205a4590401e243 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 6 Dec 2022 23:45:51 +0100 Subject: [PATCH 196/202] :bug: fix publishing of alembics --- openpype/hosts/max/__init__.py | 2 +- openpype/hosts/max/api/__init__.py | 11 +- openpype/hosts/max/api/lib.py | 78 ++++++++++++-- openpype/hosts/max/api/pipeline.py | 9 +- openpype/hosts/max/api/plugin.py | 15 +-- .../max/plugins/publish/collect_workfile.py | 63 +++++++++++ .../max/plugins/publish/extract_pointcache.py | 100 ++++++++++++++++++ .../plugins/publish/validate_scene_saved.py | 19 ++++ 8 files changed, 272 insertions(+), 25 deletions(-) create mode 100644 openpype/hosts/max/plugins/publish/collect_workfile.py create mode 100644 openpype/hosts/max/plugins/publish/extract_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/validate_scene_saved.py diff --git a/openpype/hosts/max/__init__.py b/openpype/hosts/max/__init__.py index 8da0e0ee42..9a5af8258c 100644 --- a/openpype/hosts/max/__init__.py +++ b/openpype/hosts/max/__init__.py @@ -7,4 +7,4 @@ from .addon import ( __all__ = ( "MaxAddon", "MAX_HOST_DIR", -) \ No newline at end of file +) diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index 503afade73..26190dcfb8 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -2,10 +2,19 @@ """Public API for 3dsmax""" from .pipeline import ( - MaxHost + MaxHost, ) +from .lib import( + maintained_selection, + lsattr, + get_all_children +) + __all__ = [ "MaxHost", + "maintained_selection", + "lsattr", + "get_all_children" ] diff --git a/openpype/hosts/max/api/lib.py b/openpype/hosts/max/api/lib.py index 8a57bb1bf6..9256ca9ac1 100644 --- a/openpype/hosts/max/api/lib.py +++ b/openpype/hosts/max/api/lib.py @@ -1,7 +1,13 @@ # -*- coding: utf-8 -*- """Library of functions useful for 3dsmax pipeline.""" +import json +import six from pymxs import runtime as rt from typing import Union +import contextlib + + +JSON_PREFIX = "JSON::" def imprint(node_name: str, data: dict) -> bool: @@ -10,7 +16,10 @@ def imprint(node_name: str, data: dict) -> bool: return False for k, v in data.items(): - rt.setUserProp(node, k, v) + if isinstance(v, (dict, list)): + rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}') + else: + rt.setUserProp(node, k, v) return True @@ -39,10 +48,13 @@ def lsattr( nodes = [] output_node(root, nodes) - if not value: - return [n for n in nodes if rt.getUserProp(n, attr)] - - return [n for n in nodes if rt.getUserProp(n, attr) == value] + return [ + n for n in nodes + if rt.getUserProp(n, attr) == value + ] if value else [ + n for n in nodes + if rt.getUserProp(n, attr) + ] def read(container) -> 
dict: @@ -53,12 +65,58 @@ def read(container) -> dict: return data for line in props.split("\r\n"): - key, value = line.split("=") - # if the line cannot be split we can't really parse it - if not key: + try: + key, value = line.split("=") + except ValueError: + # if the line cannot be split we can't really parse it continue - data[key.strip()] = value.strip() - data["instance_node"] = container + value = value.strip() + if isinstance(value.strip(), six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + + data[key.strip()] = value + + data["instance_node"] = container.name return data + + +@contextlib.contextmanager +def maintained_selection(): + previous_selection = rt.getCurrentSelection() + try: + yield + finally: + if previous_selection: + rt.select(previous_selection) + else: + rt.select() + + +def get_all_children(parent, node_type=None): + """Handy function to get all the children of a given node + + Args: + parent (3dsmax Node1): Node to get all children of. + node_type (None, runtime.class): give class to check for + e.g. rt.FFDBox/rt.GeometryClass etc. + + Returns: + list: list of all children of the parent node + """ + def list_children(node): + children = [] + for c in node.Children: + children.append(c) + children = children + list_children(c) + return children + child_list = list_children(parent) + + return ([x for x in child_list if rt.superClassOf(x) == node_type] + if node_type else child_list) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index cef45193c4..4f8271fb7e 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -1,9 +1,7 @@ # -*- coding: utf-8 -*- """Pipeline tools for OpenPype Houdini integration.""" import os -import sys import logging -import contextlib import json @@ -101,12 +99,12 @@ attributes "OpenPypeContext" ( context type: #string ) - + rollout params "OpenPype Parameters" ( editText editTextContext "Context" type: #string ) -) +) """) attr = rt.execute(create_attr_script) @@ -149,6 +147,3 @@ def ls() -> list: for container in sorted(containers, key=lambda name: container.name): yield lib.read(container) - - - diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py index 0f01c94ce1..4788bfd383 100644 --- a/openpype/hosts/max/api/plugin.py +++ b/openpype/hosts/max/api/plugin.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- """3dsmax specific Avalon/Pyblish plugin definitions.""" -import sys from pymxs import runtime as rt import six from abc import ABCMeta @@ -25,12 +24,12 @@ class MaxCreatorBase(object): shared_data["max_cached_subsets"] = {} cached_instances = lsattr("id", "pyblish.avalon.instance") for i in cached_instances: - creator_id = i.get("creator_identifier") + creator_id = rt.getUserProp(i, "creator_identifier") if creator_id not in shared_data["max_cached_subsets"]: - shared_data["houdini_cached_subsets"][creator_id] = [i] + shared_data["max_cached_subsets"][creator_id] = [i.name] else: shared_data[ - "houdini_cached_subsets"][creator_id].append(i) # noqa + "max_cached_subsets"][creator_id].append(i.name) # noqa return shared_data @staticmethod @@ -61,8 +60,12 @@ class MaxCreator(Creator, MaxCreatorBase): instance_data, self ) + for node in self.selected_nodes: + node.Parent = instance_node + self._add_instance_to_context(instance) imprint(instance_node.name, instance.data_to_store()) + return instance def collect_instances(self): @@ 
-70,7 +73,7 @@ class MaxCreator(Creator, MaxCreatorBase): for instance in self.collection_shared_data[ "max_cached_subsets"].get(self.identifier, []): created_instance = CreatedInstance.from_existing( - read(instance), self + read(rt.getNodeByName(instance)), self ) self._add_instance_to_context(created_instance) @@ -98,7 +101,7 @@ class MaxCreator(Creator, MaxCreatorBase): instance_node = rt.getNodeByName( instance.data.get("instance_node")) if instance_node: - rt.delete(instance_node) + rt.delete(rt.getNodeByName(instance_node)) self._remove_instance_from_context(instance) diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py new file mode 100644 index 0000000000..7112337575 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +"""Collect current work file.""" +import os +import pyblish.api + +from pymxs import runtime as rt +from openpype.pipeline import legacy_io, KnownPublishError + + +class CollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + order = pyblish.api.CollectorOrder - 0.01 + label = "Collect 3dsmax Workfile" + hosts = ['max'] + + def process(self, context): + """Inject the current working file.""" + folder = rt.maxFilePath + file = rt.maxFileName + if not folder or not file: + self.log.error("Scene is not saved.") + current_file = os.path.join(folder, file) + + context.data['currentFile'] = current_file + + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = 'workfile' + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": 'workfile', + "families": ['workfile'], + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + }) + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('Scene path: {}'.format(current_file)) + self.log.info('staging Dir: {}'.format(folder)) + self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/max/plugins/publish/extract_pointcache.py b/openpype/hosts/max/plugins/publish/extract_pointcache.py new file mode 100644 index 0000000000..904c1656da --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_pointcache.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" +Export alembic file. + +Note: + Parameters on AlembicExport (AlembicExport.Parameter): + + ParticleAsMesh (bool): Sets whether particle shapes are exported + as meshes. + AnimTimeRange (enum): How animation is saved: + #CurrentFrame: saves current frame + #TimeSlider: saves the active time segments on time slider (default) + #StartEnd: saves a range specified by the Step + StartFrame (int) + EnFrame (int) + ShapeSuffix (bool): When set to true, appends the string "Shape" to the + name of each exported mesh. This property is set to false by default. + SamplesPerFrame (int): Sets the number of animation samples per frame. + Hidden (bool): When true, export hidden geometry. + UVs (bool): When true, export the mesh UV map channel. 
+ Normals (bool): When true, export the mesh normals. + VertexColors (bool): When true, export the mesh vertex color map 0 and the + current vertex color display data when it differs + ExtraChannels (bool): When true, export the mesh extra map channels + (map channels greater than channel 1) + Velocity (bool): When true, export the meh vertex and particle velocity + data. + MaterialIDs (bool): When true, export the mesh material ID as + Alembic face sets. + Visibility (bool): When true, export the node visibility data. + LayerName (bool): When true, export the node layer name as an Alembic + object property. + MaterialName (bool): When true, export the geometry node material name as + an Alembic object property + ObjectID (bool): When true, export the geometry node g-buffer object ID as + an Alembic object property. + CustomAttributes (bool): When true, export the node and its modifiers + custom attributes into an Alembic object compound property. +""" +import os +import pyblish.api +from openpype.pipeline import publish +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractAlembic(publish.Extractor): + order = pyblish.api.ExtractorOrder + label = "Extract Pointcache" + hosts = ["max"] + families = ["pointcache", "camera"] + + def process(self, instance): + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + container = instance.data["instance_node"] + + self.log.info("Extracting pointcache ...") + + parent_dir = self.staging_dir(instance) + file_name = "{name}.abc".format(**instance.data) + path = os.path.join(parent_dir, file_name) + + # We run the render + self.log.info("Writing alembic '%s' to '%s'" % (file_name, + parent_dir)) + + abc_export_cmd = ( + f""" +AlembicExport.ArchiveType = #ogawa +AlembicExport.CoordinateSystem = #maya +AlembicExport.StartFrame = {start} +AlembicExport.EndFrame = {end} + +exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport + + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + + with maintained_selection(): + # select and export + + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(abc_export_cmd) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': file_name, + "stagingDir": parent_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py new file mode 100644 index 0000000000..6392b12d11 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from openpype.pipeline.publish import RepairAction +from pymxs import runtime as rt + + +class ValidateSceneSaved(pyblish.api.InstancePlugin): + """Validate that workfile was saved.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["max"] + label = "Validate Workfile is saved" + + def process(self, instance): + if not rt.maxFilePath or not rt.maxFileName: + raise PublishValidationError( + "Workfile is not saved", title=self.label) From d29a3ca4379a88202bc4279fe8966d87a3509820 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:17:21 +0100 Subject: [PATCH 197/202] :art: simple loader for alembics --- 
.../hosts/max/plugins/load/load_pointcache.py | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 openpype/hosts/max/plugins/load/load_pointcache.py diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py new file mode 100644 index 0000000000..150206b8b8 --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +"""Simple alembic loader for 3dsmax. + +Because of limited api, alembics can be only loaded, but not easily updated. + +""" +import os +from openpype.pipeline import ( + load, + get_representation_path, +) + + +class AbcLoader(load.LoaderPlugin): + """Alembic loader.""" + + families = ["model", "animation", "pointcache"] + label = "Load Alembic" + representations = ["abc"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + file_path = os.path.normpath(self.fname) + + abc_before = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + abc_export_cmd = (f""" +AlembicImport.ImportToRoot = false + +importFile @"{file_path}" #noPrompt + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + rt.execute(abc_export_cmd) + + abc_after = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + # This should yield new AlembicContainer node + abc_containers = abc_after.difference(abc_before) + + if len(abc_containers) != 1: + self.log.error("Something failed when loading.") + + abc_container = abc_containers.pop() + + container_name = f"{name}_CON" + container = rt.container(name=container_name) + abc_container.Parent = container + + return container + + def remove(self, container): + from pymxs import runtime as rt + + node = container["node"] + rt.delete(node) From 7327334226c45fc0291c3b08e041cb8fc7fa328b Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:20:56 +0100 Subject: [PATCH 198/202] :rotating_light: fix :dog: --- openpype/hosts/max/api/__init__.py | 2 +- openpype/hosts/max/api/pipeline.py | 6 +----- openpype/hosts/max/plugins/publish/collect_workfile.py | 2 +- openpype/hosts/max/plugins/publish/validate_scene_saved.py | 3 +-- openpype/hosts/max/startup/startup.py | 1 - 5 files changed, 4 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/max/api/__init__.py b/openpype/hosts/max/api/__init__.py index 26190dcfb8..92097cc98b 100644 --- a/openpype/hosts/max/api/__init__.py +++ b/openpype/hosts/max/api/__init__.py @@ -6,7 +6,7 @@ from .pipeline import ( ) -from .lib import( +from .lib import ( maintained_selection, lsattr, get_all_children diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py index 4f8271fb7e..f3cdf245fb 100644 --- a/openpype/hosts/max/api/pipeline.py +++ b/openpype/hosts/max/api/pipeline.py @@ -15,11 +15,7 @@ from openpype.pipeline import ( from openpype.hosts.max.api.menu import OpenPypeMenu from openpype.hosts.max.api import lib from openpype.hosts.max import MAX_HOST_DIR -from openpype.pipeline.load import any_outdated_containers -from openpype.lib import ( - register_event_callback, - emit_event, -) + from pymxs import runtime as rt # noqa log = logging.getLogger("openpype.hosts.max") diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py index 7112337575..3500b2735c 100644 --- 
a/openpype/hosts/max/plugins/publish/collect_workfile.py +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -4,7 +4,7 @@ import os import pyblish.api from pymxs import runtime as rt -from openpype.pipeline import legacy_io, KnownPublishError +from openpype.pipeline import legacy_io class CollectWorkfile(pyblish.api.ContextPlugin): diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py index 6392b12d11..8506b17315 100644 --- a/openpype/hosts/max/plugins/publish/validate_scene_saved.py +++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline import PublishValidationError -from openpype.pipeline.publish import RepairAction +from openpype.pipeline import PublishValidationError from pymxs import runtime as rt diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py index afcbd2d132..37bcef5db1 100644 --- a/openpype/hosts/max/startup/startup.py +++ b/openpype/hosts/max/startup/startup.py @@ -4,4 +4,3 @@ from openpype.pipeline import install_host host = MaxHost() install_host(host) - From 75606777695064693dca411bd47455988a669c14 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:23:22 +0100 Subject: [PATCH 199/202] :rotating_light: fix hound round 2 --- openpype/hosts/max/plugins/create/create_pointcache.py | 3 ++- openpype/hosts/max/plugins/load/load_pointcache.py | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py index 4c9ec7fb97..c08b0dedfe 100644 --- a/openpype/hosts/max/plugins/create/create_pointcache.py +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -18,4 +18,5 @@ class CreatePointCache(plugin.MaxCreator): instance_data, pre_create_data) # type: CreatedInstance - instance_node = rt.getNodeByName(instance.get("instance_node")) + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py index 150206b8b8..285d84b7b6 100644 --- a/openpype/hosts/max/plugins/load/load_pointcache.py +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -6,8 +6,7 @@ Because of limited api, alembics can be only loaded, but not easily updated. 
""" import os from openpype.pipeline import ( - load, - get_representation_path, + load ) From ad95165765bc0841305888af177888bfaf7d1357 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:25:15 +0100 Subject: [PATCH 200/202] :rotating_light: fix hound round 3 --- openpype/hosts/max/plugins/create/create_pointcache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py index c08b0dedfe..32f0838471 100644 --- a/openpype/hosts/max/plugins/create/create_pointcache.py +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -11,9 +11,9 @@ class CreatePointCache(plugin.MaxCreator): icon = "gear" def create(self, subset_name, instance_data, pre_create_data): - from pymxs import runtime as rt + # from pymxs import runtime as rt - instance = super(CreatePointCache, self).create( + _ = super(CreatePointCache, self).create( subset_name, instance_data, pre_create_data) # type: CreatedInstance From f4391cbeb2245e132f561cbdc89b8aefc88b06cb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 7 Dec 2022 01:39:28 +0100 Subject: [PATCH 201/202] :recycle: add 3dsmax 2023 variant --- .../system_settings/applications.json | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index a4db0dd327..b8aa8cec74 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -114,6 +114,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": "{}/app_icons/3dsmax.png", + "host_name": "max", + "environment": { + "ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup" + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "flame": { "enabled": true, "label": "Flame", @@ -1309,35 +1338,6 @@ } } }, - "3dsmax": { - "enabled": true, - "label": "3ds max", - "icon": "{}/app_icons/3dsmax.png", - "host_name": "3dsmax", - "environment": { - - }, - "variants": { - "2023": { - "use_python_2": false, - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "3DSMAX_VERSION": "2023" - } - } - } - }, "djvview": { "enabled": true, "label": "DJV View", From 99930c2856ac882f8029000a6825911d48ac68c6 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 7 Dec 2022 03:30:40 +0000 Subject: [PATCH 202/202] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 5e61ee3a6b..443c76544b 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.1" +__version__ = "3.14.9-nightly.2"